Diff of the two buildlogs:
--
--- b1/build.log	2024-03-25 08:26:39.668213966 +0000
+++ b2/build.log	2024-03-25 08:30:56.197208006 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Sun Mar 24 20:15:37 -12 2024
-I: pbuilder-time-stamp: 1711354537
+I: Current time: Mon Mar 25 22:26:49 +14 2024
+I: pbuilder-time-stamp: 1711355209
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/trixie-reproducible-base.tgz]
 I: copying local configuration
@@ -32,52 +32,84 @@
 dpkg-source: info: applying 0001-Version-bump-to-pyyaml-5.4.1-1596.patch
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/6701/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/D01_modify_environment starting
+debug: Running on virt32z.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 Mar 25 08:27 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build/reproducible-path'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='armhf'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=3 '
- DISTRIBUTION='trixie'
- HOME='/root'
- HOST_ARCH='armhf'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="2" [2]="21" [3]="1" [4]="release" [5]="arm-unknown-linux-gnueabihf")
+ BASH_VERSION='5.2.21(1)-release'
+ BUILDDIR=/build/reproducible-path
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=armhf
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=4 '
+ DIRSTACK=()
+ DISTRIBUTION=trixie
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=arm
+ HOST_ARCH=armhf
 IFS=' 
 '
- INVOCATION_ID='562068637a924c6e8f26d80819942deb'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='6701'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=df9c589422414db2b8f84563d4b41fb0
+ LANG=C
+ LANGUAGE=it_CH:it
+ LC_ALL=C
+ MACHTYPE=arm-unknown-linux-gnueabihf
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnueabihf
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=18368
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.NgQMBz1M/pbuilderrc_SHGV --distribution trixie --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/trixie-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.NgQMBz1M/b1 --logfile b1/build.log elasticsearch-curator_5.8.1-4.1.dsc'
- SUDO_GID='113'
- SUDO_UID='107'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://10.0.0.15:3142/'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.NgQMBz1M/pbuilderrc_fsZc --distribution trixie --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/trixie-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.NgQMBz1M/b2 --logfile b2/build.log elasticsearch-curator_5.8.1-4.1.dsc'
+ SUDO_GID=110
+ SUDO_UID=103
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://10.0.0.15:3142/
 I: uname -a
- Linux virt64b 6.1.0-18-arm64 #1 SMP Debian 6.1.76-1 (2024-02-01) aarch64 GNU/Linux
+ Linux i-capture-the-hostname 6.1.0-18-armmp-lpae #1 SMP Debian 6.1.76-1 (2024-02-01) armv7l GNU/Linux
 I: ls -l /bin
- lrwxrwxrwx 1 root root 7 Mar 23 11:24 /bin -> usr/bin
-I: user script /srv/workspace/pbuilder/6701/tmp/hooks/D02_print_environment finished
+ lrwxrwxrwx 1 root root 7 Mar 22 11:24 /bin -> usr/bin
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/D02_print_environment finished
 -> Attempting to satisfy build-dependencies
 -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -276,7 +308,7 @@
 Get: 115 http://deb.debian.org/debian trixie/main armhf python3-voluptuous all 0.14.2-1 [45.8 kB]
 Get: 116 http://deb.debian.org/debian trixie/main armhf python3-yaml armhf 6.0.1-2 [162 kB]
 Get: 117 http://deb.debian.org/debian trixie/main armhf w3m armhf 0.5.3+git20230121-2+b2 [1017 kB]
-Fetched 69.3 MB in 18s (3927 kB/s)
+Fetched 69.3 MB in 1s (69.3 MB/s)
 debconf: delaying package configuration, since apt-utils is not installed
 Selecting previously unselected package fonts-lato.
 (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19577 files and directories currently installed.)
@@ -655,8 +687,8 @@
 Setting up tzdata (2024a-1) ...
 
 Current default time zone: 'Etc/UTC'
-Local time is now: Mon Mar 25 08:21:05 UTC 2024.
-Universal Time is now: Mon Mar 25 08:21:05 UTC 2024.
+Local time is now: Mon Mar 25 08:27:51 UTC 2024.
+Universal Time is now: Mon Mar 25 08:27:51 UTC 2024.
 Run 'dpkg-reconfigure tzdata' if you wish to change it.
 
 Setting up autotools-dev (20220109.1) ...
@@ -796,7 +828,11 @@
 fakeroot is already the newest version (1.33-1).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/A99_set_merged_usr starting
+Not re-configuring usrmerge for trixie
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/A99_set_merged_usr finished
+hostname: Name or service not known
+I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
 dpkg-buildpackage: info: source package elasticsearch-curator
 dpkg-buildpackage: info: source version 5.8.1-4.1
 dpkg-buildpackage: info: source distribution unstable
@@ -861,20 +897,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 running egg_info
 creating elasticsearch_curator.egg-info
 writing elasticsearch_curator.egg-info/PKG-INFO
@@ -1069,20 +1105,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 running egg_info
 writing elasticsearch_curator.egg-info/PKG-INFO
 writing dependency_links to elasticsearch_curator.egg-info/dependency_links.txt
@@ -1375,7 +1411,7 @@
 /build/reproducible-path/elasticsearch-curator-5.8.1/test/integration/test_count_pattern.py:78: SyntaxWarning: invalid escape sequence '\d'
 '\'^(a|b)-\d{4}\.\d{2}\.\d{2}$\'', 'true', 'name', '\'%Y.%m.%d\'', 'true', 1
 Starting new HTTP connection (1): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.011s]
+GET http://localhost:9200/ [status:N/A request:0.005s]
 Traceback (most recent call last):
 File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in 
 from cx_Freeze import setup, Executable
@@ -1429,10 +1465,10 @@
 ^^^^^^^^^^^^^^^^
 File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
 raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (2): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.005s]
+GET http://localhost:9200/ [status:N/A request:0.003s]
 Traceback (most recent call last):
 File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in 
 from cx_Freeze import setup, Executable
@@ -1486,10 +1522,10 @@
 ^^^^^^^^^^^^^^^^
 File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
 raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (3): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.003s]
+GET http://localhost:9200/ [status:N/A request:0.002s]
 Traceback (most recent call last):
 File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in 
 from cx_Freeze import setup, Executable
@@ -1543,10 +1579,10 @@
 ^^^^^^^^^^^^^^^^
 File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
 raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1600,10 +1636,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1657,10 +1693,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1714,10 +1750,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1771,10 +1807,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.010s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1828,10 +1864,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a 
new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1885,10 +1921,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1942,10 +1978,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1999,10 +2035,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2056,10 +2092,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2113,10 +2149,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2170,7 +2206,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -2227,10 +2263,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2284,10 +2320,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2341,10 +2377,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2398,10 +2434,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A 
request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2455,10 +2491,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2512,10 +2548,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2569,10 +2605,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2626,7 +2662,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.004s] @@ -2683,10 +2719,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2740,10 +2776,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", 
line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2797,10 +2833,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2854,10 +2890,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2911,10 +2947,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2968,10 +3004,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3025,10 +3061,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3082,10 +3118,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3139,10 +3175,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3196,10 +3232,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3253,7 +3289,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -3310,10 +3346,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): 
localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3367,10 +3403,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3424,10 +3460,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3481,10 +3517,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.011s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3538,10 +3574,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3595,10 +3631,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 -GET http://localhost:9200/ [status:N/A 
request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3652,10 +3688,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3709,10 +3745,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3766,10 +3802,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3823,7 +3859,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3880,10 +3916,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3937,10 +3973,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3994,10 +4030,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4051,10 +4087,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4108,10 +4144,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4165,10 +4201,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4222,10 +4258,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", 
line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4279,10 +4315,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4336,10 +4372,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4393,10 +4429,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4450,7 +4486,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -4507,10 +4543,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > 
None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4564,10 +4600,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4621,10 +4657,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4678,10 +4714,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4735,10 +4771,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4792,10 +4828,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET 
http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4849,10 +4885,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4906,7 +4942,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -4963,10 +4999,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5020,10 +5056,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5077,10 +5113,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5134,10 
+5170,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5191,10 +5227,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5248,10 +5284,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5305,10 +5341,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5362,10 +5398,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5419,10 +5455,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5476,10 +5512,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5533,10 +5569,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5590,10 +5626,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5647,10 +5683,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5704,7 +5740,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 
186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5761,10 +5797,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5818,10 +5854,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5875,10 +5911,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5932,10 +5968,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5989,10 +6025,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None 
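Note on the hunks above and below: they differ only in the per-request timing (0.002s vs 0.003s and similar). In both builds the test setup probes http://localhost:9200/ dozens of times and every attempt ends in [Errno 111] Connection refused, because the build chroot has no Elasticsearch instance listening on that port. A minimal sketch of that failure mode, assuming only the stock urllib3 API (this is not curator's own test harness):

    import urllib3

    # Probe localhost:9200 once per iteration, mirroring the numbered
    # "Starting new HTTP connection (N): localhost:9200" lines; with nothing
    # listening, each attempt fails with [Errno 111] Connection refused,
    # surfaced by urllib3 as NewConnectionError.
    http = urllib3.PoolManager()
    for attempt in range(56, 101):
        try:
            http.request("GET", "http://localhost:9200/", timeout=1.0, retries=False)
        except urllib3.exceptions.HTTPError as err:  # NewConnectionError in practice
            print(f"connection attempt {attempt}: {err}")

Because the time each refused connection takes depends on scheduler load, these hunks carry no substantive difference between the two builds; the comparison becomes meaningful again once the probing stops.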
Starting new HTTP connection (82): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6046,10 +6082,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6103,10 +6139,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6160,10 +6196,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.006s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6217,10 +6253,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6274,7 +6310,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 GET 
http://localhost:9200/ [status:N/A request:0.002s] @@ -6331,7 +6367,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6388,10 +6424,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6445,7 +6481,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -6502,7 +6538,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -6559,10 +6595,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6616,10 +6652,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, 
in from cx_Freeze import setup, Executable @@ -6673,7 +6709,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6730,10 +6766,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6787,7 +6823,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6844,7 +6880,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6901,7 +6937,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -6958,10 +6994,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7015,10 +7051,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.008s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7072,7 +7108,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -8428,7 +8464,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -8464,7 +8500,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -9780,9 +9816,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. 
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269002) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9835,9 +9871,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269002) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9964,9 +10000,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269002) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9987,9 +10023,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269002) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -10033,9 +10069,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. 
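In the "Remains in actionable list" / "Removed from actionable list" hunks above and below, only the point-of-reference epoch changes (1711268703 vs 1711269002, roughly five minutes apart), which suggests it is derived from the wall-clock time of each test run; the filtering decisions themselves are identical in both builds. A hypothetical re-statement of the comparison these lines report (the function name and structure are illustrative, not curator's code):

    import time

    def is_actionable(index_epoch, direction, point_of_reference):
        # An index stays in the actionable list when its creation epoch lies
        # on the requested side of the point of reference.
        if direction == "older":
            return index_epoch < point_of_reference
        if direction == "younger":
            return index_epoch > point_of_reference
        raise ValueError(f"unknown direction: {direction}")

    point_of_reference = int(time.time())  # differs between the two builds
    for name, epoch in [("index-2016.03.03", 1456963200),
                        ("index-2016.03.04", 1457049600)]:
        verdict = "Remains in" if is_actionable(epoch, "older", point_of_reference) else "Removed from"
        print(f'{verdict} actionable list: Index "{name}" age ({epoch}), '
              f'direction: "older", point of reference, ({point_of_reference})')

This time-dependent point of reference is what makes these otherwise identical test runs show up in the diff at all.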
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269002) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268703) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -10056,9 +10092,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269002) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268703) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269002) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -11377,7 +11413,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, 
msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -11410,7 +11446,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 
'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -11442,7 +11478,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 
'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -11471,7 +11507,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -11506,7 +11542,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 
'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11534,7 +11570,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, 
msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11565,7 +11601,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , 
msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11585,7 +11621,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 
'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -11633,7 +11669,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), 
None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -11653,7 +11689,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 
'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -11677,7 +11713,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, 
msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -11711,7 +11747,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 
'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -11748,7 +11784,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 
'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -11795,7 +11831,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 
'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11844,30 +11880,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, 
msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711268704) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711269003) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711268704) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711269003) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. 
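The filter_by_age lines above compare each snapshot's age (its creation time in epoch seconds) against a point of reference and keep or drop it according to the requested direction. Below is a minimal sketch of that comparison, inferred from the "Remains in/Removed from actionable list" messages; the helper name and the behaviour at exact equality are assumptions, not taken from curator's source.

    # Sketch of the older/younger decision suggested by the log output above.
    # Assumption: both "age" and the point of reference are epoch seconds.
    def is_actionable(age: int, point_of_reference: int, direction: str) -> bool:
        if direction == "older":
            # keep snapshots created at or before the reference point
            return age <= point_of_reference
        if direction == "younger":
            # keep snapshots created at or after the reference point
            return age >= point_of_reference
        raise ValueError(f"unknown direction: {direction!r}")

    # Values taken from the log lines above:
    print(is_actionable(1422748800, 1711268704, "older"))    # True  -> remains
    print(is_actionable(1425168002, 1425168001, "older"))    # False -> removed
    print(is_actionable(1425168002, 1422748801, "younger"))  # True  -> remains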
All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -11876,14 +11912,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. 
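The "Schema error" messages above are produced by voluptuous when each "filter" config block is validated against the schema dumped just before it. The following is a reduced sketch that keeps only the filtertype key and its custom message, enough to reproduce the two error shapes seen in the log (unknown filtertype and extra key); the remaining filter options are omitted here.

    from voluptuous import Any, In, MultipleInvalid, Schema

    FILTERTYPES = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
                   'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
                   'period', 'shards', 'space', 'state']

    # Only the filtertype key from the dumped schema, with the same custom message.
    schema = Schema({
        'filtertype': Any(In(FILTERTYPES),
                          msg=f"filtertype must be one of {FILTERTYPES}"),
    })

    for config in ({'filtertype': 12345.6789}, {'no_filtertype': 'fail'}):
        try:
            schema(config)
        except MultipleInvalid as err:
            # e.g. "filtertype must be one of [...] for dictionary value @ data['filtertype']"
            # and  "extra keys not allowed @ data['no_filtertype']"
            print(f"Schema error: {err}")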
All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -11894,7 +11930,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 
'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -11912,11 +11948,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 Snapshot snap_name is actionable and remains in the list. 
-Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711268704) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711269003) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711268704) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711269003) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -11926,11 +11962,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1711268704) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1711269003) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1711268704) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1711269003) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -11940,14 +11976,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1711268704) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1711269003) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -11957,11 +11993,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1711268704) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1711269003) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -11971,13 +12007,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. 
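Several of the filter_by_age runs above derive a snapshot's age from its name rather than from metadata: the regex shown in the log (\d{4}\.\d{2}\.\d{2}, i.e. a %Y.%m.%d timestring) is searched for in the name, and snapshots that do not match are removed "for having no age". The helper below is a hypothetical illustration of that lookup; the function name and return convention are assumptions.

    import re
    from datetime import datetime, timezone

    TIMESTRING = "%Y.%m.%d"
    TIMESTRING_REGEX = r"\d{4}\.\d{2}\.\d{2}"   # the "regex = ..." line above

    def name_to_epoch(name: str):
        """Return the epoch seconds encoded in a snapshot name, or None if absent."""
        match = re.search(TIMESTRING_REGEX, name)
        if match is None:
            return None   # corresponds to "Removing snapshot ... for having no age"
        dt = datetime.strptime(match.group(), TIMESTRING).replace(tzinfo=timezone.utc)
        return int(dt.timestamp())

    print(name_to_epoch("snapshot-2015.03.01"))  # 1425168000, as reported above
    print(name_to_epoch("snap_name"))            # None -> filtered out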
Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268704 +Point of Reference: 1711269003 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -12226,7 +12262,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.011s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12257,7 +12293,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.007s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12287,7 +12323,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12300,7 +12336,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12313,9 +12349,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': 
Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -12323,7 +12359,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.004s] +GET https://127.0.0.1:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12353,7 +12389,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.006s] +GET https://127.0.0.1:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12383,7 +12419,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12396,7 +12432,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12409,9 +12445,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator 
cannot proceed. Exiting. ..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12423,7 +12459,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.006s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12453,7 +12489,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12466,7 +12502,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12479,9 +12515,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
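Each of the connectivity checks above fails the same way: the TCP connection is refused, urllib3 raises NewConnectionError, the retry machinery wraps it in MaxRetryError, and requests finally surfaces a ProxyError ("Cannot connect to proxy."), which curator reports as "HTTP N/A error" before exiting. A minimal reproduction of that chain, assuming nothing is listening on the two local ports used here (the proxy address is a placeholder, not taken from the log):

    import requests

    try:
        # Force the HTTPS request through an unreachable proxy: the refused proxy
        # connection becomes NewConnectionError -> MaxRetryError -> ProxyError.
        requests.get(
            "https://127.0.0.1:9200/",
            proxies={"https": "http://127.0.0.1:3128"},
            timeout=5,
        )
    except requests.exceptions.ProxyError as err:
        print(f"HTTP N/A error: {err}")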
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12489,7 +12525,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.006s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12519,7 +12555,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12532,7 +12568,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12545,11 +12581,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
-.Random datemath string for extraction: 
+.Random datemath string for extraction: 
 Response index name for extraction: failure
 .Random datemath string for extraction: 
 Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath
@@ -12624,7 +12660,7 @@
 TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}}
 TASK_DATA keys: ['completed', 'task', 'response']
 Running time: 1637.0395377210002 seconds
-Task "UNIT TEST" completed at 2017-03-16T08:53:39Z.
+Task "UNIT TEST" completed at 2017-03-17T10:53:39Z.
 .It's a REINDEX TASK
 TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}}
 TASK_DATA keys: ['completed', 'task', 'response']
@@ -12642,93 +12678,93 @@
 Response: False
 Unable to complete action "replicas" within max_wait (1) seconds.
Result: False -......Schema: .f at 0xf600b898> +......Schema: .f at 0xb4fd2cf8> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'epoch': None, 'exclude': False, 'stats_result': 'min_value'} -.Schema: .f at 0xf6006898> +Filter #0: {'filtertype': 'age', 'direction': 
'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} +.Schema: .f at 0xb5146258> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'epoch': None, 
'exclude': False, 'stats_result': 'min_value'} -.Schema: .f at 0xf6006d48> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} +.Schema: .f at 0xb4fd2bb8> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0xf60062a8> +.Schema: .f at 0xb4fd2b68> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0xf60062a8> +.Schema: .f at 0xb4fd2b68> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0xf60062a8> +.Schema: .f at 0xb4fd2b68> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'timestring': None, 'use_age': False, 'stats_result': 'min_value'} -.Schema: .f at 0xf60062a8> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'timestring': None, 'use_age': False} +.Schema: .f at 0xb51a17f8> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0xf5e91f78> +.Schema: .f at 0xb513f988> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0xf5e91f78> +.Schema: .f at 0xb513f988> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0xf5e91f78> +.Schema: .f at 0xb513f988> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0xf5e91ed8> +.Schema: .f at 0xb513fd48> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 
'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'reverse': True, 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf60063e8> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None, 'reverse': True} +.Schema: .f at 0xb5146b68> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'reverse': True, 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf5ebc028> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None, 'reverse': True} +.Schema: .f at 0xb4ffb078> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 
'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
-.Schema: .f at 0xf6006258> +.Schema: .f at 0xb51463e8> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf6006668> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} +.Schema: .f at 0xb5146d48> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0xf6006258> +.Schema: .f at 0xb51463e8> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf5fff5c8> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 
'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} +.Schema: .f at 0xb513fcf8> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -12769,7 +12805,7 @@ """ :882: SyntaxWarning: invalid escape sequence '\d' ---------------------------------------------------------------------- -Ran 529 tests in 18.653s +Ran 529 tests in 15.779s OK (SKIP=3) I: pybuild base:305: python3.11 setup.py test @@ -12841,7 +12877,7 @@ writing manifest file 'elasticsearch_curator.egg-info/SOURCES.txt' running build_ext Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12895,7 +12931,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -12952,7 +12988,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13009,7 +13045,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 GET http://localhost:9200/ [status:N/A 
request:0.002s] @@ -13066,7 +13102,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13123,7 +13159,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13180,10 +13216,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13237,10 +13273,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13294,10 +13330,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13351,7 +13387,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 GET http://localhost:9200/ [status:N/A 
request:0.002s] @@ -13408,7 +13444,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13465,7 +13501,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13522,10 +13558,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13579,7 +13615,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13636,7 +13672,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -13693,10 +13729,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13750,7 +13786,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : 
Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (17): localhost:9200
 GET http://localhost:9200/ [status:N/A request:0.002s]
@@ -13807,7 +13843,7 @@
 ^^^^^^^^^^^^^^^^
   File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
     raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (18): localhost:9200
 GET http://localhost:9200/ [status:N/A request:0.002s]
[… hunks @@ -13864,7 through @@ -18481,10 repeat this same failed-probe pattern for HTTP connections 18 through 100; each attempt is followed by a traceback beginning at setup.py line 42 ("from cx_Freeze import setup, Executable"), the visible -/+ text of every pair is identical because the bracketed HTTPConnection reprs were stripped in this rendering, and the only other differences are GET request timings between 0.001s and 0.012s …]
@@ -18538,7 +18574,7 @@
 ^^^^^^^^^^^^^^^^
   File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
     raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Getting all indices
 Detected Elasticsearch version 5.0.0
@@ -19892,7 +19928,7 @@
 TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}}
 TASK_DATA keys: ['completed', 'task', 'response']
 Running time: 1637.0395377210002 seconds
-Task "UNIT TEST" completed at 2017-03-16T08:53:39Z.
+Task "UNIT TEST" completed at 2017-03-17T10:53:39Z.
 Response: True
 Action "reindex" finished executing (may or may not have been successful)
 Result: True
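Note on the run of hunks above: no Elasticsearch instance is listening on localhost:9200 while the test suite runs, so every probe fails identically in both builds; the pairs most likely differ only inside the stripped bracketed part of the urllib3 message, which normally carries the HTTPConnection object repr and therefore a memory address, plus sub-millisecond request timings. A minimal sketch of the failure mode, using only the standard library and assuming nothing is listening on the port (the file name is illustrative, not taken from the build):

    # refused_probe.py - reproduce "[Errno 111] Connection refused" when port 9200 is closed
    import errno
    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        sock.connect(("127.0.0.1", 9200))       # no Elasticsearch listening here
    except OSError as exc:
        # urllib3's _new_conn (seen in the tracebacks above) wraps this OSError
        # as NewConnectionError, producing the exception text repeated in the hunks
        assert exc.errno == errno.ECONNREFUSED  # errno 111 on Linux
    finally:
        sock.close()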
@@ -19928,7 +19964,7 @@
 TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}}
 TASK_DATA keys: ['completed', 'task', 'response']
 Running time: 1637.0395377210002 seconds
-Task "UNIT TEST" completed at 2017-03-16T08:53:39Z.
+Task "UNIT TEST" completed at 2017-03-17T10:53:39Z.
 Response: True
 Action "reindex" finished executing (may or may not have been successful)
 Result: True
@@ -21244,9 +21280,9 @@
 Filtering indices by age
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269024)
 Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21299,9 +21335,9 @@
 Filtering indices by age
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269024)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21428,9 +21464,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269024)
 Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21451,9 +21487,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1711269024)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21497,9 +21533,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269024)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268728)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21520,9 +21556,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1711269024)
 Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711268728)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1711269024)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
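Note on the timestamp churn above: the "point of reference" epochs (1711268728 in the first build, 1711269024 in the second) and the two "Task "UNIT TEST" completed at" strings differ only because the current wall clock and the configured timezone leak into the test output; the underlying task data (start_time_in_millis, running time) is identical in both logs, and the two completed-at strings sit 26 hours apart, consistent with the builds running under timezones that far apart. An illustrative sketch of both leaks, assuming nothing about curator's real implementation (all names below are made up):

    # time_leak_sketch.py - two ways a build's wall clock can show up in test logs
    import time
    from datetime import datetime, timezone

    # 1) An age filter that measures index age against "now" logs a different
    #    point of reference on every run.
    point_of_reference = int(time.time())
    index_epoch = 1456963200                       # index-2016.03.03
    actionable = index_epoch < point_of_reference  # direction: "older"

    # 2) Formatting an instant with the *local* clock while appending a literal
    #    "Z" yields strings that move with TZ even though the instant is fixed.
    completed = 1489695981.997 + 1637.039          # start_time_in_millis + running time
    local_str = datetime.fromtimestamp(completed).strftime("%Y-%m-%dT%H:%M:%SZ")
    utc_str = datetime.fromtimestamp(completed, tz=timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    print(local_str, utc_str)                      # local_str varies with TZ; utc_str does not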
@@ -22841,7 +22877,7 @@
 All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}]
 Top of the loop: ['index-2016.03.03', 'index-2016.03.04']
 Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}
-Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")}
+Schema: {… visible text identical to the -Schema line above …}
 "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}
 Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}
 Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}
@@ -22874,7 +22910,7 @@
 All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}]
 Top of the loop: ['index-2016.03.03', 'index-2016.03.04']
 Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}
-Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
+Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
 "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}
 Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}
 Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}
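Note on the "Schema:" lines above and below: they appear to be the logged repr of the voluptuous validation schema that curator applies to each filter block; the empty-looking slots such as Any(, , msg=None) and the bare 'disk_space': / 'use_age': entries are where angle-bracketed reprs (classes, validator objects) were dropped by this rendering, so the actual difference between the two builds is not visible here. A small sketch of how printing such a schema produces this shape, assuming the voluptuous package is available; the schema below is invented for illustration, not curator's actual one:

    # schema_repr_sketch.py - why a logged voluptuous schema looks like the lines above
    from voluptuous import Any, Coerce, In, Schema

    schema = Schema({
        'unit_count': Coerce(int),
        'timestring': Any(None, str),
        'filtertype': Any(In(['age', 'alias', 'closed'])),
    })

    # schema.schema is the underlying dict; printing it yields validator reprs
    # such as 'timestring': Any(None, <class 'str'>, msg=None) - the
    # angle-bracketed parts are exactly what the rendering above strips out.
    print(schema.schema)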
@@ -22906,7 +22942,7 @@
 All filters: [{'filtertype': 'closed'}]
 Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06']
 Un-parsed filter args: {'filtertype': 'closed'}
-Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
+Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
 "filter" config: {'filtertype': 'closed'}
 Parsed filter args: {'filtertype': 'closed'}
 Filtering closed indices
@@ -22935,7 +22971,7 @@
 All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}]
 Top of the loop: ['index_name']
 Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2}
-Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
+Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
 "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2}
 Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2}
 Filter args: {'max_num_segments': 2}
@@ -22970,7 +23006,7 @@
 All filters: [{'filtertype': 'ilm', 'exclude': True}]
 Top of the loop: ['index-2016.03.03', 'index-2016.03.04']
 Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True}
-Schema: {… same filter schema repr as in the @@ -22841 hunk above …}
+Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None),
'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -22998,7 +23034,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -23029,7 +23065,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must 
be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23049,7 +23085,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), 
msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -23097,7 +23133,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 
'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -23117,7 +23153,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. 
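The repeated "Schema:" dumps and the "Schema error:" lines in these hunks come from voluptuous-style validation of each filter dictionary. As a minimal sketch only (a heavily reduced, hypothetical schema, not the package's actual validator module; the per-key validators are elided here just as they are in the log), the following reproduces the two error messages seen above, "filtertype must be one of [...]" and "extra keys not allowed":

from voluptuous import Any, Coerce, In, MultipleInvalid, Schema

# Allowed filtertype values, copied verbatim from the log output above.
FILTERTYPES = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
               'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
               'period', 'shards', 'space', 'state']

# Hypothetical, reduced filter schema; the real one defines a validator per key.
filter_schema = Schema({
    'filtertype': Any(In(FILTERTYPES),
                      msg='filtertype must be one of %s' % FILTERTYPES),
    'unit_count': Coerce(int),
})

for config in ({'filtertype': 12345.6789}, {'no_filtertype': 'fail'}):
    try:
        filter_schema(config)
    except MultipleInvalid as err:
        # Prints, e.g.:
        #   Schema error: filtertype must be one of [...] for dictionary value @ data['filtertype']
        #   Schema error: extra keys not allowed @ data['no_filtertype']
        print('Schema error: %s' % err)
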
@@ -23141,7 +23177,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -23175,7 +23211,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 
'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -23212,7 +23248,7 @@ All filters: [{'filtertype': 'space', 
'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 
'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -23259,7 +23295,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema 
error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23308,30 +23344,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 
'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711268730) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711269025) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711268730) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711269025) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': 
Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 
'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -23340,14 +23376,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': 
Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, 
msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -23358,7 +23394,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , 
msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -23376,11 +23412,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711268730) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1711269025) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711268730) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1711269025) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -23390,11 +23426,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1711268730) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1711269025) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1711268730) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1711269025) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -23404,14 +23440,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1711268730) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1711269025) .Repository repo_name exists. 
Starting filter_by_age Point of Reference: 1456963200 @@ -23421,11 +23457,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1711268730) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1711269025) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -23435,13 +23471,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1711268730 +Point of Reference: 1711269025 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -23690,7 +23726,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.011s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23721,7 +23757,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.011s] +GET https://127.0.0.1:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23751,7 +23787,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23764,7 +23800,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23777,9 +23813,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", 
line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -23787,7 +23823,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.008s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23817,7 +23853,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.010s] +GET https://127.0.0.1:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23847,7 +23883,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23860,7 +23896,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23873,9 +23909,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) 
-requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. ..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23887,7 +23923,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.010s] +GET https://127.0.0.1:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23917,7 +23953,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23930,7 +23966,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23943,9 +23979,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new 
connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23953,7 +23989,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.010s] +GET https://127.0.0.1:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23983,7 +24019,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23996,7 +24032,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -24009,11 +24045,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a 
new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. -.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -24088,7 +24124,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -24106,93 +24142,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
Result: False -......Schema: .f at 0xf5c2c988> +......Schema: .f at 0xb51cd7a8> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'stats_result': 'min_value', 'epoch': None, 'exclude': False} -.Schema: .f at 0xf5a97208> +Filter #0: {'filtertype': 'age', 'direction': 
'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'exclude': False, 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0xb5038258> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'stats_result': 
'min_value', 'epoch': None, 'exclude': False} -.Schema: .f at 0xf5a97f28> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'exclude': False, 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0xb5086a78> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0xf5a97f78> +.Schema: .f at 0xb50867a8> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0xf5a97f28> +.Schema: .f at 0xb5086a78> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0xf5a97f78> +.Schema: .f at 0xb50867a8> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'timestring': None, 'use_age': False, 'stats_result': 'min_value'} -.Schema: .f at 0xf5a97f28> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'use_age': False, 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0xb5086a78> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0xf5a97f78> +.Schema: .f at 0xb50867a8> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0xf5eefa28> +.Schema: .f at 0xb5490a78> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0xf5eef4d8> +.Schema: .f at 0xb5490528> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0xf5eefa28> +.Schema: .f at 0xb5490a78> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 
'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0xf5c135c8> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'reverse': True, 'stats_result': 'min_value', 'timestring': None, 'threshold_behavior': 'greater_than'} +.Schema: .f at 0xb5086d98> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0xf5ae5758> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'reverse': True, 'stats_result': 'min_value', 'timestring': None, 'threshold_behavior': 'greater_than'} +.Schema: .f at 0xb50b2e88> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 
'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
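The paired -Schema/+Schema lines in this hunk differ only in the hexadecimal address embedded in each function repr, and the paired Filter #0 lines differ only in dictionary key order; the validated filter contents are the same in both builds. A minimal standalone Python sketch of those two effects (illustrative only, not taken from the curator or voluptuous code):

    # Two sources of cosmetic run-to-run noise visible in this hunk.

    def make_validator():
        """Build a throwaway callable, standing in for the callables the log reprs."""
        def f(value):
            return value
        return f

    # 1) repr() of a function embeds its memory address, so two otherwise identical
    #    builds print different "<function ... at 0x...>" strings.
    print(repr(make_validator()))  # e.g. <function make_validator.<locals>.f at 0x...>
    print(repr(make_validator()))  # same definition, different address each run

    # 2) dict repr follows insertion order, so the same filter assembled with keys
    #    added in a different order prints differently while still comparing equal.
    a = {'filtertype': 'age', 'timestring': None, 'epoch': None, 'exclude': False}
    b = {'filtertype': 'age', 'epoch': None, 'exclude': False, 'timestring': None}
    print(a == b)              # True  - identical contents
    print(repr(a) == repr(b))  # False - key order differs in the printed form

Neither difference changes what the schema accepts or rejects; only the logged representation moves between the two builds.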
-.Schema: .f at 0xf5c2cd98> +.Schema: .f at 0xb51cdbb8> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0xf5c2c758> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than'} +.Schema: .f at 0xb51cd7a8> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0xf5c2cd98> +.Schema: .f at 0xb51cdbb8> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0xf5c2c758> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 
'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than'} +.Schema: .f at 0xb51cd7a8> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -24228,7 +24264,7 @@ /build/reproducible-path/elasticsearch-curator-5.8.1/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="? 'to' if job is 'add' else 'from', ---------------------------------------------------------------------- -Ran 529 tests in 18.715s +Ran 529 tests in 15.734s OK (SKIP=3) create-stamp debian/debhelper-build-stamp @@ -24418,97 +24454,97 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py 
to __main__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/actions.py to actions.cpython-312.pyc +/usr/lib/python3.12/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with 'str' literal. Did you mean "=="? +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/indexlist.py to indexlist.cpython-312.pyc +/usr/lib/python3.12/dist-packages/curator/indexlist.py:882: SyntaxWarning: invalid escape sequence '\d' +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to curator_cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/actions.py to actions.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/actions.py to actions.cpython-312.pyc 
-/usr/lib/python3.12/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with 'str' literal. Did you mean "=="? -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/close.py to close.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/settings.py to settings.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/exceptions.py to exceptions.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-312.pyc byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/restore.py to restore.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/show.py to show.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/delete.py to delete.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to 
curator_cli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py to __main__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/exceptions.py to exceptions.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/indexlist.py to indexlist.cpython-312.pyc -/usr/lib/python3.12/dist-packages/curator/indexlist.py:882: SyntaxWarning: invalid escape sequence '\d' -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/settings.py to settings.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc +byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/close.py to close.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/restore.py to restore.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-312.pyc running install_egg_info Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/elasticsearch_curator-5.8.1.egg-info Skipping SOURCES.txt @@ -24691,96 +24727,96 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator creating 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py 
-> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
+/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
-/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/close.py to close.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/settings.py to settings.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/restore.py to restore.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/utils.py to utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
 byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/settings.py to settings.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/close.py to close.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/restore.py to restore.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
 running install_egg_info
 Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/elasticsearch_curator-5.8.1.egg-info
 Skipping SOURCES.txt
@@ -24809,9 +24845,9 @@
 dpkg-gencontrol: warning: package python-elasticsearch-curator-doc: substitution variable ${sphinxdoc:Built-Using} unused, but is defined
 dh_md5sums -O--buildsystem=pybuild
 dh_builddeb -O--buildsystem=pybuild
-dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
 dpkg-deb: building package 'elasticsearch-curator' in '../elasticsearch-curator_5.8.1-4.1_all.deb'.
 dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
+dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
 dpkg-genbuildinfo --build=binary -O../elasticsearch-curator_5.8.1-4.1_armhf.buildinfo
 dpkg-genchanges --build=binary -O../elasticsearch-curator_5.8.1-4.1_armhf.changes
 dpkg-genchanges: info: binary-only upload (no source code included)
@@ -24819,12 +24855,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: not including original source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/18368/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/6701 and its subdirectories
-I: Current time: Sun Mar 24 20:26:26 -12 2024
-I: pbuilder-time-stamp: 1711355186
+I: removing directory /srv/workspace/pbuilder/18368 and its subdirectories
+I: Current time: Mon Mar 25 22:30:50 +14 2024
+I: pbuilder-time-stamp: 1711355450