Diff of the two buildlogs:
--
--- b1/build.log	2024-05-27 10:36:07.632324001 +0000
+++ b2/build.log	2024-05-27 10:40:02.014348081 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Sun Jun 29 04:55:59 -12 2025
-I: pbuilder-time-stamp: 1751216159
+I: Current time: Tue May 28 00:36:10 +14 2024
+I: pbuilder-time-stamp: 1716806170
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/unstable-reproducible-base.tgz]
 I: copying local configuration
@@ -32,52 +32,84 @@
 dpkg-source: info: applying 0001-Version-bump-to-pyyaml-5.4.1-1596.patch
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/2930692/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/D01_modify_environment starting
+debug: Running on ionos1-amd64.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 May 27 10:36 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build/reproducible-path'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='amd64'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=42 '
- DISTRIBUTION='unstable'
- HOME='/root'
- HOST_ARCH='amd64'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="2" [2]="21" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu")
+ BASH_VERSION='5.2.21(1)-release'
+ BUILDDIR=/build/reproducible-path
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=amd64
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=20 '
+ DIRSTACK=()
+ DISTRIBUTION=unstable
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=x86_64
+ HOST_ARCH=amd64
 IFS=' 
 '
- INVOCATION_ID='250aca9a42ed4ac4852dbb9e12bc9bce'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='2930692'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=66e427fc4206448f90ef636f3ae27bcb
+ LANG=C
+ LANGUAGE=et_EE:et
+ LC_ALL=C
+ MACHTYPE=x86_64-pc-linux-gnu
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnu
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=374002
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.EFkIby6u/pbuilderrc_J9Dl --distribution unstable --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.EFkIby6u/b1 --logfile b1/build.log elasticsearch-curator_5.8.1-4.1.dsc'
- SUDO_GID='110'
- SUDO_UID='105'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://213.165.73.152:3128'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.EFkIby6u/pbuilderrc_MJX7 --distribution unstable --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.EFkIby6u/b2 --logfile b2/build.log elasticsearch-curator_5.8.1-4.1.dsc'
+ SUDO_GID=110
+ SUDO_UID=105
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://46.16.76.132:3128
 I: uname -a
- Linux ionos5-amd64 6.7.12+bpo-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.7.12-1~bpo12+1 (2024-05-06) x86_64 GNU/Linux
+ Linux i-capture-the-hostname 6.1.0-21-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.90-1 (2024-05-03) x86_64 GNU/Linux
 I: ls -l /bin
- lrwxrwxrwx 1 root root 7 Jun 29 14:05 /bin -> usr/bin
-I: user script /srv/workspace/pbuilder/2930692/tmp/hooks/D02_print_environment finished
+ lrwxrwxrwx 1 root root 7 May 27 07:42 /bin -> usr/bin
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/D02_print_environment finished
  -> Attempting to satisfy build-dependencies
  -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -274,7 +306,7 @@
 Get: 113 http://deb.debian.org/debian unstable/main amd64 python3-voluptuous all 0.14.2-1 [45.8 kB]
 Get: 114 http://deb.debian.org/debian unstable/main amd64 python3-yaml amd64 6.0.1-2 [177 kB]
 Get: 115 http://deb.debian.org/debian unstable/main amd64 w3m amd64 0.5.3+git20230121-2+b3 [1106 kB]
-Fetched 71.2 MB in 7s (10.2 MB/s)
+Fetched 71.2 MB in 5s (13.0 MB/s)
 debconf: delaying package configuration, since apt-utils is not installed
 Selecting previously unselected package fonts-lato.
 (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19718 files and directories currently installed.)
@@ -652,8 +684,8 @@
 Setting up tzdata (2024a-4) ...
 
 Current default time zone: 'Etc/UTC'
-Local time is now: Sun Jun 29 16:57:31 UTC 2025.
-Universal Time is now: Sun Jun 29 16:57:31 UTC 2025.
+Local time is now: Mon May 27 10:37:32 UTC 2024.
+Universal Time is now: Mon May 27 10:37:32 UTC 2024.
 Run 'dpkg-reconfigure tzdata' if you wish to change it.
 
 Setting up autotools-dev (20220109.1) ...
@@ -790,7 +822,11 @@
 fakeroot is already the newest version (1.34-1).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/A99_set_merged_usr starting
+Not re-configuring usrmerge for unstable
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/A99_set_merged_usr finished
+hostname: Name or service not known
+I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
 dpkg-buildpackage: info: source package elasticsearch-curator
 dpkg-buildpackage: info: source version 5.8.1-4.1
 dpkg-buildpackage: info: source distribution unstable
@@ -855,20 +891,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 running egg_info
 creating elasticsearch_curator.egg-info
 writing elasticsearch_curator.egg-info/PKG-INFO
@@ -1063,20 +1099,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 running egg_info
 writing elasticsearch_curator.egg-info/PKG-INFO
 writing dependency_links to elasticsearch_curator.egg-info/dependency_links.txt
@@ -1369,7 +1405,7 @@
 /build/reproducible-path/elasticsearch-curator-5.8.1/test/integration/test_count_pattern.py:78: SyntaxWarning: invalid escape sequence '\d'
   '\'^(a|b)-\d{4}\.\d{2}\.\d{2}$\'', 'true', 'name', '\'%Y.%m.%d\'', 'true', 1
 Starting new HTTP connection (1): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.002s]
+GET http://localhost:9200/ [status:N/A request:0.003s]
 Traceback (most recent call last):
   File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in
     from cx_Freeze import setup, Executable
@@ -1423,10 +1459,10 @@
                    ^^^^^^^^^^^^^^^^
   File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
     raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (2): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.001s]
+GET http://localhost:9200/ [status:N/A request:0.002s]
 Traceback (most recent call last):
   File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in
     from cx_Freeze import setup, Executable
@@ -1480,10 +1516,10 @@
                    ^^^^^^^^^^^^^^^^
   File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn
     raise NewConnectionError(
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Starting new HTTP connection (3): localhost:9200
-GET http://localhost:9200/ [status:N/A request:0.001s]
+GET http://localhost:9200/ [status:N/A request:0.002s]
 Traceback (most recent call last):
   File
"/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1537,10 +1573,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1594,10 +1630,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1651,10 +1687,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1708,10 +1744,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1765,10 +1801,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", 
line 42, in from cx_Freeze import setup, Executable @@ -1822,10 +1858,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1879,10 +1915,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1936,10 +1972,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1993,10 +2029,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2050,10 +2086,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ 
-2107,10 +2143,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2164,10 +2200,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2221,10 +2257,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2278,10 +2314,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2335,10 +2371,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2392,10 +2428,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2449,10 +2485,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2506,10 +2542,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2563,10 +2599,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2620,10 +2656,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2677,10 +2713,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", 
line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2734,10 +2770,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2791,10 +2827,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2848,10 +2884,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2905,10 +2941,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2962,10 +2998,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3019,10 +3055,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3076,10 +3112,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3133,10 +3169,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3190,10 +3226,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3247,10 +3283,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3304,10 +3340,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3361,10 +3397,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3418,10 +3454,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3475,10 +3511,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3532,10 +3568,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3589,10 +3625,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3646,10 +3682,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3703,10 +3739,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3760,10 +3796,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3817,10 +3853,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3874,10 +3910,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3931,10 +3967,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3988,10 +4024,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4045,10 +4081,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4102,10 +4138,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
> None Starting new HTTP connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4159,10 +4195,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4216,10 +4252,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4273,10 +4309,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4330,10 +4366,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4387,10 +4423,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 
-GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4444,10 +4480,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4501,10 +4537,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4558,10 +4594,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4615,10 +4651,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4672,10 +4708,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] 
+GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4729,10 +4765,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4786,10 +4822,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4843,10 +4879,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4900,10 +4936,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4957,10 +4993,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] 
Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5014,10 +5050,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5071,10 +5107,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5128,10 +5164,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5185,10 +5221,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5242,10 +5278,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File 
"/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5299,10 +5335,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5356,10 +5392,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5413,10 +5449,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5470,7 +5506,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5527,10 +5563,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5584,10 +5620,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5641,10 +5677,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5698,10 +5734,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5755,10 +5791,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5812,10 +5848,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5869,10 +5905,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5926,10 +5962,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5983,10 +6019,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6040,10 +6076,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6097,10 +6133,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6154,10 +6190,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6211,10 +6247,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6268,10 +6304,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6325,10 +6361,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6382,10 +6418,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6439,10 +6475,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6496,10 +6532,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6553,10 +6589,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6610,10 +6646,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6667,10 +6703,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6724,10 +6760,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
> None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6781,10 +6817,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6838,10 +6874,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6895,10 +6931,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6952,10 +6988,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7009,10 +7045,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 
-GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7066,7 +7102,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -8422,7 +8458,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -8458,7 +8494,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -9774,9 +9810,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129914) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719964) Index index-2016.03.04 is actionable and remains in the list. 
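The long run of hunks above differs only in the timing of refused HTTP probes to localhost:9200 (request:0.001s versus request:0.002s): the test suite repeatedly tries to reach an Elasticsearch server that is not running inside the build environment, and how long a refused connection attempt takes depends on the machine and its load at build time rather than on the package. A minimal sketch of that effect, using a plain socket instead of the test suite's urllib3 stack (the probe() helper and its output format are illustrative, not taken from the curator sources):

    import socket
    import time

    def probe(host="localhost", port=9200):
        """Time a single connection attempt to a port with no listener (hypothetical helper)."""
        start = time.perf_counter()
        try:
            socket.create_connection((host, port), timeout=1).close()
            status = "connected"
        except OSError as err:  # ECONNREFUSED (errno 111) when nothing is listening
            status = f"refused (errno {err.errno})"
        return status, time.perf_counter() - start

    status, elapsed = probe()
    # The outcome is stable ("refused (errno 111)"), but the elapsed time is wall-clock
    # noise, which is exactly the 0.001s/0.002s difference recorded in the two logs.
    print(f"probe of localhost:9200: {status} in {elapsed:.3f}s")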
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129914)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719964)
.Getting all indices
Detected Elasticsearch version 5.0.0
All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -9829,9 +9865,9 @@
Filtering indices by age
Generating working list of indices
Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129914)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719964)
Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129914)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719964)
.Getting all indices
Detected Elasticsearch version 5.0.0
All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -9958,9 +9994,9 @@
Generating working list of indices
Generating working list of indices
Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129914)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719964)
Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129914)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719964)
.Getting all indices
Detected Elasticsearch version 5.0.0
All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -9981,9 +10017,9 @@
Generating working list of indices
Generating working list of indices
Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129914)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719964)
Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129914)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719964)
.Getting all indices
Detected Elasticsearch version 5.0.0
All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -10027,9 +10063,9 @@
Generating working list of indices
Generating working list of indices
Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129915)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719964)
Index index-2016.03.04 is not actionable, removing from list.
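The age-filter hunks above and immediately below differ only in the "point of reference" epoch (1751129914/1751129915 in the first log, 1716719964 in the second): curator parses a date out of each index name and compares it with a reference that, in these tests, tracks the wall clock at build time, so two builds run at different times necessarily log different reference values. A rough sketch of the comparison these lines describe, simplified and not curator's actual implementation (the is_actionable() helper is hypothetical):

    import time
    from datetime import datetime, timezone

    def is_actionable(index_name, direction, point_of_reference, timestring="%Y.%m.%d"):
        """Hypothetical stand-in for the age filter: parse the date embedded in an
        index name and compare it with a reference epoch.  'older' keeps indices
        whose timestamp lies before the reference, 'younger' keeps the rest."""
        date_part = index_name.split("-", 1)[1]  # e.g. "2016.03.03"
        age = datetime.strptime(date_part, timestring).replace(tzinfo=timezone.utc).timestamp()
        return age < point_of_reference if direction == "older" else age >= point_of_reference

    por = int(time.time())  # the build's wall clock becomes the point of reference
    for name in ("index-2016.03.03", "index-2016.03.04"):
        verdict = "remains in" if is_actionable(name, "older", por) else "removed from"
        # Only the reference epoch printed here changes from one build to the next.
        print(f'Index "{name}" {verdict} the actionable list, point of reference ({por})')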
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129915) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719964) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -10050,9 +10086,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129915) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719964) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129915) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719964) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -11371,7 +11407,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , 
msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -11404,7 +11440,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': 
Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -11436,7 +11472,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, 
msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -11465,7 +11501,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , 
msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -11500,7 +11536,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 
'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11528,7 +11564,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': 
Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11559,7 +11595,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 
'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11579,7 +11615,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': 
Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -11627,7 +11663,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, 
msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -11647,7 +11683,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': 
Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -11671,7 +11707,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 
'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -11705,7 +11741,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, 
msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -11742,7 +11778,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, 
msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -11789,7 +11825,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, 
msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11838,30 +11874,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 
'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1751129915) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1716719965) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1751129915) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1716719965) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. 
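
The "Remains in actionable list" decisions in this hunk reduce to a single epoch comparison between each snapshot's age and the point of reference. The sketch below is an assumption about the shape of that check (the helper name and signature are made up, not the packaged curator function); it reproduces the 'older' outcome recorded here.

    def by_age(items, direction, point_of_reference):
        """items: name -> age in epoch seconds (UTC)."""
        kept = []
        for name, age in items.items():
            if direction == 'older':
                actionable = age < point_of_reference
            else:  # 'younger'
                actionable = age > point_of_reference
            if actionable:
                kept.append(name)   # "remains in the actionable list"
        return kept

    snapshots = {'snap_name': 1422748800, 'snapshot-2015.03.01': 1425168002}
    # Both ages predate the point of reference, so with direction 'older'
    # both snapshots remain, matching the lines logged above.
    print(by_age(snapshots, 'older', 1751129915))
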
All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -11870,14 +11906,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. 
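
The two kinds of "Schema error:" lines in these hunks come from voluptuous validation: an unknown key is rejected with "extra keys not allowed", and a filtertype outside the allowed list fails with the custom msg= text attached to the In() validator. A minimal, self-contained sketch (not curator's actual validator module) that produces messages of the same shape:

    from voluptuous import Schema, Any, In, Invalid

    FILTERTYPES = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
                   'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
                   'period', 'shards', 'space', 'state']

    schema = Schema({
        'filtertype': Any(
            In(FILTERTYPES),
            msg='filtertype must be one of {0}'.format(FILTERTYPES)),
    })

    for config in ({'filtertype': 12345.6789}, {'no_filtertype': 'fail'}):
        try:
            schema(config)
        except Invalid as err:
            # Prints messages like the "Schema error:" lines above, e.g.
            # "extra keys not allowed @ data['no_filtertype']"
            print('Schema error: {0}'.format(err))
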
All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -11888,7 +11924,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 
'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -11906,11 +11942,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 Snapshot snap_name is actionable and remains in the list. 
-Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1751129915) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1716719965) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1751129915) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1716719965) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -11920,11 +11956,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1751129915) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1716719965) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1751129915) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1716719965) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -11934,14 +11970,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1751129915) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1716719965) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -11951,11 +11987,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1751129915) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1716719965) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -11965,13 +12001,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. 
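
The "regex = \d{4}\.\d{2}\.\d{2}" lines above correspond to deriving a pattern from the timestring '%Y.%m.%d' and reading an age back out of the snapshot name; a name with no match has no age and is dropped. The helper below is a hypothetical illustration of that step, not curator's own code; it yields the 1425168000 epoch logged for snapshot-2015.03.01 and no age at all for snap_name.

    import re
    from datetime import datetime, timezone

    TIMESTRING = '%Y.%m.%d'
    REGEX = r'\d{4}\.\d{2}\.\d{2}'   # pattern derived from the timestring

    def age_from_name(name):
        match = re.search(REGEX, name)
        if not match:
            return None              # "Removing snapshot ... for having no age"
        stamp = datetime.strptime(match.group(0), TIMESTRING)
        return int(stamp.replace(tzinfo=timezone.utc).timestamp())

    print(age_from_name('snapshot-2015.03.01'))  # 1425168000, as logged above
    print(age_from_name('snap_name'))            # None -> dropped from the list
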
Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129915 +Point of Reference: 1716719965 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -12220,7 +12256,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.004s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12251,7 +12287,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12281,7 +12317,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12294,7 +12330,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12307,9 +12343,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': 
Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -12317,7 +12353,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.001s] +GET https://127.0.0.1:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12347,7 +12383,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12377,7 +12413,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12390,7 +12426,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12403,9 +12439,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator 
cannot proceed. Exiting. ..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12417,7 +12453,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12447,7 +12483,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12460,7 +12496,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12473,9 +12509,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
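The connection-failure hunks above and below all follow the same pattern: the unit tests instantiate a client against 127.0.0.1:9200 with no Elasticsearch listening, the request is routed through an unreachable HTTP proxy, and requests surfaces urllib3's NewConnectionError ([Errno 111] Connection refused) as a ProxyError, which curator logs as "HTTP N/A error" before exiting. A minimal sketch of that exception chain, assuming nothing listens on either port (the proxy address below is made up for illustration):

    import requests

    try:
        # Hypothetical proxy address; as in the build, nothing is listening on it.
        requests.get(
            "https://127.0.0.1:9200/",
            proxies={"https": "http://127.0.0.1:3128"},
            timeout=5,
        )
    except requests.exceptions.ProxyError as err:
        # err wraps urllib3.exceptions.MaxRetryError, which in turn wraps
        # ProxyError('Cannot connect to proxy.', NewConnectionError(... Errno 111))
        print("HTTP N/A error:", err)
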
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12483,7 +12519,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12513,7 +12549,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12526,7 +12562,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12539,11 +12575,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
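Within these hunks the two builds differ only in the measured request durations (e.g. request:0.002s vs request:0.004s); the kwargs, the exception chain and the "Curator cannot proceed. Exiting." outcome are identical. A small sketch, not part of the build, of the kind of normalisation that would make such timing noise (and object addresses) disappear from a log comparison:

    import re

    def normalize(line: str) -> str:
        # Mask sub-second request durations and memory addresses, the only
        # varying pieces in the hunks above.
        line = re.sub(r"request:\d+\.\d+s", "request:<t>s", line)
        line = re.sub(r"0x[0-9a-f]+", "0x<addr>", line)
        return line

    a = "GET https://127.0.0.1:9200/ [status:N/A request:0.002s]"
    b = "GET https://127.0.0.1:9200/ [status:N/A request:0.004s]"
    assert normalize(a) == normalize(b)
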
-.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -12618,7 +12654,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -12636,93 +12672,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
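The only substantive-looking change in the reindex task hunk above is the 'Task "UNIT TEST" completed at ...' line, which moves from 2017-03-16T08:53:39Z to 2017-03-17T10:53:39Z, exactly 26 hours apart. Both values correspond to the same instant - start_time_in_millis plus the running time from TASK_DATA - rendered in what appears to be each build's local timezone rather than UTC. A quick check of that arithmetic (the 26-hour gap is consistent with offsets of UTC-12 and UTC+14):

    from datetime import datetime, timedelta, timezone

    start_ms = 1489695981997        # TASK_DATA task['start_time_in_millis']
    running_ns = 1637039537721      # TASK_DATA task['running_time_in_nanos']
    completed = datetime.fromtimestamp(start_ms / 1000 + running_ns / 1e9,
                                       tz=timezone.utc)

    for hours in (-12, 14):
        local = completed.astimezone(timezone(timedelta(hours=hours)))
        # The log prints the local time with a literal "Z" suffix.
        print(local.strftime("%Y-%m-%dT%H:%M:%SZ"))
    # -> 2017-03-16T08:53:39Z and 2017-03-17T10:53:39Z, matching the diff hunk
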
Result: False -......Schema: .f at 0x7f92c2248ea0> +......Schema: .f at 0x7f2fbb984ea0> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 'timestring': None, 'stats_result': 'min_value', 'epoch': None} -.Schema: .f at 0x7f92c2249080> +Filter #0: {'filtertype': 'age', 
'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'epoch': None, 'exclude': False, 'timestring': None} +.Schema: .f at 0x7f2fbb985080> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 
'timestring': None, 'stats_result': 'min_value', 'epoch': None} -.Schema: .f at 0x7f92c2248ea0> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'epoch': None, 'exclude': False, 'timestring': None} +.Schema: .f at 0x7f2fbb984ea0> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0x7f92c2248ea0> +.Schema: .f at 0x7f2fbb984ea0> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0x7f92c2231800> +.Schema: .f at 0x7f2fbb96d800> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 
'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0x7f92c2230d60> +.Schema: .f at 0x7f2fbb96cd60> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'use_age': False, 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0x7f92c2231800> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'timestring': None, 'use_age': False} +.Schema: .f at 0x7f2fbb96d800> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0x7f92c2232020> +.Schema: .f at 0x7f2fbb96e020> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0x7f92c2231800> +.Schema: .f at 0x7f2fbb96d800> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0x7f92c2232020> +.Schema: .f at 0x7f2fbb96e020> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0x7f92c2231800> +.Schema: .f at 0x7f2fbb96d800> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 
'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'threshold_behavior': 'greater_than', 'timestring': None, 'reverse': True, 'stats_result': 'min_value'} -.Schema: .f at 0x7f92c2232020> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0x7f2fbb96e020> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'threshold_behavior': 'greater_than', 'timestring': None, 'reverse': True, 'stats_result': 'min_value'} -.Schema: .f at 0x7f92c21877e0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0x7f2fbbb777e0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
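The schema dumps in this part of the test output are voluptuous validators, and the failure at the end of this hunk is voluptuous's standard message for a missing required key: with source set to field_stats the space filter also requires field, and the test deliberately omits it. A much-simplified sketch (this is not curator's actual schema, which assembles these validators dynamically) showing how that message is produced:

    from voluptuous import Coerce, MultipleInvalid, Required, Schema

    # Simplified stand-in for the 'space' filter schema dumped above; in
    # curator the 'field' key only becomes required when source == 'field_stats'.
    space_filter = Schema({
        Required('filtertype'): 'space',
        Required('disk_space'): Coerce(float),
        'use_age': bool,
        'exclude': bool,
        'source': str,
        Required('field'): str,
    })

    try:
        space_filter({'filtertype': 'space', 'disk_space': 1, 'use_age': True,
                      'exclude': False, 'source': 'field_stats'})
    except MultipleInvalid as err:
        print('Schema error:', err)   # required key not provided @ data['field']
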
-.Schema: .f at 0x7f92c2186840> +.Schema: .f at 0x7f2fbbb76840> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'reverse': True, 'stats_result': 'min_value'} -.Schema: .f at 0x7f92c2187ce0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} +.Schema: .f at 0x7f2fbbb77ce0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0x7f92c2186840> +.Schema: .f at 0x7f2fbbb76840> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'reverse': True, 'stats_result': 'min_value'} -.Schema: .f at 0x7f92c22339c0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 
'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} +.Schema: .f at 0x7f2fbb96f9c0> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -12763,7 +12799,7 @@ """ :882: SyntaxWarning: invalid escape sequence '\d' ---------------------------------------------------------------------- -Ran 529 tests in 13.706s +Ran 529 tests in 14.919s OK (SKIP=3) I: pybuild base:311: python3.11 setup.py test @@ -12835,7 +12871,7 @@ writing manifest file 'elasticsearch_curator.egg-info/SOURCES.txt' running build_ext Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12889,10 +12925,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12946,10 +12982,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13003,10 +13039,10 @@ 
^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13060,10 +13096,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13117,10 +13153,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13174,10 +13210,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13231,10 +13267,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13288,10 +13324,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13345,10 +13381,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13402,10 +13438,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13459,10 +13495,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13516,10 +13552,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13573,10 +13609,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13630,10 +13666,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13687,10 +13723,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13744,10 +13780,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13801,10 +13837,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13858,10 +13894,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13915,10 +13951,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13972,10 +14008,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14029,10 +14065,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14086,10 +14122,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14143,10 +14179,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14200,10 +14236,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14257,10 +14293,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14314,10 +14350,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14371,10 +14407,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14428,10 +14464,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14485,10 +14521,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14542,10 +14578,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14599,10 +14635,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14656,10 +14692,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14713,7 +14749,7 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -14770,10 +14806,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14827,10 +14863,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14884,10 +14920,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14941,10 +14977,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14998,10 +15034,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : 
Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15055,10 +15091,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15112,10 +15148,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15169,10 +15205,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15226,10 +15262,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15283,10 +15319,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] 
Connection refused > None Starting new HTTP connection (44): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15340,10 +15376,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15397,10 +15433,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15454,10 +15490,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15511,10 +15547,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15568,10 +15604,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP 
connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15625,10 +15661,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15682,10 +15718,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15739,10 +15775,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15796,10 +15832,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15853,10 +15889,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 -GET 
http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15910,10 +15946,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15967,10 +16003,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16024,10 +16060,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16081,10 +16117,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16138,10 +16174,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A 
request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16195,10 +16231,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16252,10 +16288,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16309,10 +16345,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16366,10 +16402,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16423,10 +16459,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ 
[status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16480,10 +16516,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16537,10 +16573,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16594,10 +16630,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16651,10 +16687,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16708,10 +16744,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most 
recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16765,10 +16801,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16822,10 +16858,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16879,10 +16915,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16936,7 +16972,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -16993,10 +17029,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17050,10 +17086,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in 
_new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17107,10 +17143,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17164,10 +17200,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17221,10 +17257,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17278,7 +17314,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17335,10 +17371,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None 
Starting new HTTP connection (80): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17392,10 +17428,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17449,10 +17485,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17506,10 +17542,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17563,10 +17599,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17620,7 +17656,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 
GET http://localhost:9200/ [status:N/A request:0.002s] @@ -17677,10 +17713,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17734,10 +17770,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17791,10 +17827,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17848,10 +17884,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17905,10 +17941,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, 
Executable @@ -17962,10 +17998,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18019,10 +18055,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18076,10 +18112,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18133,7 +18169,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -18190,10 +18226,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18247,10 +18283,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] 
Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18304,10 +18340,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18361,10 +18397,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18418,10 +18454,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18475,10 +18511,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18532,7 +18568,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused
 > None
 Getting all indices
 Detected Elasticsearch version 5.0.0
@@ -19886,7 +19922,7 @@
 TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}}
 TASK_DATA keys: ['completed', 'task', 'response']
 Running time: 1637.0395377210002 seconds
-Task "UNIT TEST" completed at 2017-03-16T08:53:39Z.
+Task "UNIT TEST" completed at 2017-03-17T10:53:39Z.
 Response: True
 Action "reindex" finished executing (may or may not have been successful)
 Result: True
@@ -19922,7 +19958,7 @@
 TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}}
 TASK_DATA keys: ['completed', 'task', 'response']
 Running time: 1637.0395377210002 seconds
-Task "UNIT TEST" completed at 2017-03-16T08:53:39Z.
+Task "UNIT TEST" completed at 2017-03-17T10:53:39Z.
 Response: True
 Action "reindex" finished executing (may or may not have been successful)
 Result: True
@@ -21238,9 +21274,9 @@
 Filtering indices by age
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129934)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719984)
 Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129934)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719984)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21293,9 +21329,9 @@
 Filtering indices by age
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719984)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719984)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21422,9 +21458,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129934)
+Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719984)
 Index index-2016.03.04 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129934)
+Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719984)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21445,9 +21481,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1716719984)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1716719984)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21491,9 +21527,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719984)
 Index index-2016.03.04 is not actionable, removing from list.
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129934)
+Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719984)
 .Getting all indices
 Detected Elasticsearch version 5.0.0
 All indices: ['index-2016.03.03', 'index-2016.03.04']
@@ -21514,9 +21550,9 @@
 Generating working list of indices
 Generating working list of indices
 Index index-2016.03.03 is actionable and remains in the list.
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1751129934) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1716719984) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1751129934) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1716719984) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -22835,7 +22871,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 
'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -22868,7 +22904,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': 
Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -22900,7 +22936,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, 
msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -22929,7 +22965,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 
'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -22964,7 +23000,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': 
Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -22992,7 +23028,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 
'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -23023,7 +23059,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': 
Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23043,7 +23079,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 
'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -23091,7 +23127,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 
'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -23111,7 +23147,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be 
one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -23135,7 +23171,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -23169,7 +23205,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: 
{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -23206,7 +23242,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 
'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -23253,7 +23289,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23302,30 +23338,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': 
Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1751129934) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1716719985) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1751129934) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1716719985) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, 
msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, 
msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -23334,14 +23370,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': 
Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, 
msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -23352,7 +23388,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 
'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -23370,11 +23406,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1751129934) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1716719985) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1751129934) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1716719985) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -23384,11 +23420,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1751129934) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1716719985) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1751129934) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1716719985) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -23398,14 +23434,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. 
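The "Schema:" reprs and the two kinds of "Schema error:" messages in the hunks above ("filtertype must be one of [...]" and "extra keys not allowed @ data['no_filtertype']") match the behaviour of voluptuous-style validation of each filter block. A minimal sketch of that kind of check, using the voluptuous library directly with a deliberately cut-down stand-in schema (illustrative only, not curator's actual schema):

    # Reproduces the flavour of the "Schema error:" lines above with voluptuous;
    # the schema here is a reduced stand-in, not curator's real filter schema.
    from voluptuous import Schema, Any, In, Coerce, MultipleInvalid

    FILTERTYPES = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
                   'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
                   'period', 'shards', 'space', 'state']

    filter_schema = Schema({
        'filtertype': Any(In(FILTERTYPES),
                          msg='filtertype must be one of {0}'.format(FILTERTYPES)),
        'kind': str,
        'value': str,
        'direction': Any('older', 'younger'),
        'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years'),
        'unit_count': Coerce(int),   # a string '1' is coerced to the integer 1
    })

    for config in [
        {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'},
        {'filtertype': 'sir_not_appearing_in_this_film'},
        {'filtertype': 12345.6789},
        {'no_filtertype': 'fail'},
    ]:
        try:
            print('Parsed filter args:', filter_schema(config))
        except MultipleInvalid as err:
            # e.g. "extra keys not allowed @ data['no_filtertype']", or the custom
            # filtertype message for the two invalid filtertype values
            print('Schema error:', err)

Unknown keys are rejected because a voluptuous Schema defaults to extra=PREVENT_EXTRA, which is exactly the "extra keys not allowed" case shown above.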
-Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1751129934) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1716719985) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -23415,11 +23451,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1751129934) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1716719985) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -23429,13 +23465,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1751129934 +Point of Reference: 1716719985 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -23684,7 +23720,7 @@ Not using "requests_aws4auth" python module to connect. 
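In the filter_by_age hunks above, only the "Point of Reference" epoch changes between the two builds: when no explicit epoch is pinned, it is derived from the wall clock at test time (roughly "now minus unit_count units"), so it inevitably differs from run to run, while the pinned values (1425168001, 1422748801, ...) stay identical. A rough sketch of the comparison those lines describe, assuming strict inequalities and 86400-second days (illustrative only, not curator's implementation):

    import time

    def point_of_reference(unit_count, seconds_per_unit=86400, epoch=None):
        # "now minus unit_count units" unless an explicit epoch pins it down;
        # the pinned form is what keeps a test like this reproducible.
        now = epoch if epoch is not None else time.time()
        return int(now - unit_count * seconds_per_unit)

    def filter_by_age(snapshots, direction, por):
        kept = []
        for name, age in snapshots:   # age = creation time as epoch seconds
            actionable = age < por if direction == 'older' else age > por
            verdict = 'Remains in' if actionable else 'Removed from'
            print(f'{verdict} actionable list: Snapshot "{name}" age ({age}), '
                  f'direction: "{direction}", point of reference, ({por})')
            if actionable:
                kept.append(name)
        return kept

    snaps = [('snap_name', 1422748800), ('snapshot-2015.03.01', 1425168002)]
    filter_by_age(snaps, 'older', point_of_reference(1))   # PoR follows the clock
    filter_by_age(snaps, 'older', 1425168001)              # pinned PoR, as in the hunk above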
Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.004s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23715,7 +23751,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23745,7 +23781,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23758,7 +23794,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23771,9 +23807,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -23781,7 +23817,7 @@ Not using "requests_aws4auth" python module to connect. 
Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23811,7 +23847,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23841,7 +23877,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23854,7 +23890,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23867,9 +23903,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23881,7 +23917,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23911,7 +23947,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23924,7 +23960,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23937,9 +23973,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23947,7 +23983,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23977,7 +24013,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23990,7 +24026,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -24003,11 +24039,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
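Every connectivity hunk in this stretch fails the same way: http_proxy is set in the build environment and nothing listens on 127.0.0.1:9200, so the client's GET is routed to the proxy and the connection is refused; between the two builds only the proxy address and the few-millisecond request timings differ. A minimal sketch of how that surfaces with plain requests (the proxy address below is a documentation-range placeholder, not the one from the build):

    import os
    import requests

    # Placeholder proxy; in the build it comes from the http_proxy variable
    # set by the pbuilder environment.
    os.environ['https_proxy'] = 'http://203.0.113.1:3128'

    try:
        # requests honours http_proxy/https_proxy from the environment by
        # default (trust_env=True), so the failure shows up as a proxy error
        # rather than a plain connection error.
        requests.get('https://127.0.0.1:9200/', timeout=3)
    except requests.exceptions.RequestException as err:
        # In the log above this is requests.exceptions.ProxyError wrapping
        # urllib3's "[Errno 111] Connection refused".
        print('HTTP N/A error:', err)
        print('Curator cannot proceed. Exiting.')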
-.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -24082,7 +24118,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -24100,93 +24136,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
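The reindex-task hunk changes only the reported completion time, and it moves by exactly 26 hours, which is the gap between the two builds' TZ settings (Etc/GMT+12 vs Etc/GMT-14). The timestamp appears to be start_time_in_millis plus the running time, rendered in local time but still suffixed with "Z"; the arithmetic below, using only the figures from the TASK_DATA dict above, reproduces both strings (a standard-library check, not curator's code):

    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo   # Python 3.9+

    start_time_in_millis = 1489695981997      # from TASK_DATA above
    running_time_in_nanos = 1637039537721

    completed = start_time_in_millis / 1000 + running_time_in_nanos / 1e9
    utc = datetime.fromtimestamp(completed, tz=timezone.utc)
    print(utc.strftime('%Y-%m-%dT%H:%M:%SZ'))   # 2017-03-16T20:53:39Z

    for tzname in ('Etc/GMT+12', 'Etc/GMT-14'):  # first and second build
        local = utc.astimezone(ZoneInfo(tzname))
        print(tzname, local.strftime('%Y-%m-%dT%H:%M:%S') + 'Z')
    # Etc/GMT+12 -> 2017-03-16T08:53:39Z, Etc/GMT-14 -> 2017-03-17T10:53:39Z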
Result: False -......Schema: .f at 0x7f070246f7e0> +......Schema: .f at 0x7fa1ffa9f7e0> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} -.Schema: .f at 0x7f070246ec00> +Filter #0: {'filtertype': 'age', 
'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'exclude': False, 'stats_result': 'min_value', 'epoch': None} +.Schema: .f at 0x7fa1ffa9ec00> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 
'stats_result': 'min_value', 'timestring': None, 'exclude': False} -.Schema: .f at 0x7f070246cf40> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'exclude': False, 'stats_result': 'min_value', 'epoch': None} +.Schema: .f at 0x7fa1ffa9cf40> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0x7f070246f420> +.Schema: .f at 0x7fa1ffa9f420> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0x7f070246cf40> +.Schema: .f at 0x7fa1ffa9cf40> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 
'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0x7f070246fe20> +.Schema: .f at 0x7fa1ffa9fe20> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'use_age': False, 'timestring': None} -.Schema: .f at 0x7f070246fe20> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'timestring': None, 'use_age': False, 'stats_result': 'min_value'} +.Schema: .f at 0x7fa1ffa9fe20> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0x7f0702387740> +.Schema: .f at 0x7fa1ffc6b740> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0x7f070246fe20> +.Schema: .f at 0x7fa1ffa9fe20> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0x7f070246d620> +.Schema: .f at 0x7fa1ffa9d620> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0x7f070246c9a0> +.Schema: .f at 0x7fa1ffa9c9a0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 
'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None, 'reverse': True} -.Schema: .f at 0x7f070246d620> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'threshold_behavior': 'greater_than', 'timestring': None, 'stats_result': 'min_value', 'reverse': True} +.Schema: .f at 0x7fa1ffa9d620> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'timestring': None, 'reverse': True} -.Schema: .f at 0x7f0702385440> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'threshold_behavior': 'greater_than', 'timestring': None, 'stats_result': 'min_value', 'reverse': True} +.Schema: .f at 0x7fa1ffc69440> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
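Most of the remaining churn in these per-filtertype hunks carries no content change at all: the "Schema: ... at 0x7f...>" lines embed the repr() of a locally defined validator function, and in CPython a function's repr includes its memory address, which differs on every run (the ordering of a few keys inside the Filter #0 dicts also shifts between the builds). A two-line illustration of the address part:

    def make_filter_validator():
        def f(value):      # stands in for the local validator whose repr is logged
            return value
        return f

    # Prints something like "<function make_filter_validator.<locals>.f at 0x7f...>";
    # the 0x... part is the object's id() in CPython and changes from run to run.
    print(repr(make_filter_validator()))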
-.Schema: .f at 0x7f0702385b20> +.Schema: .f at 0x7fa1ffc69b20> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0x7f07023868e0> +.Schema: .f at 0x7fa1ffc6a8e0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0x7f07023865c0> +.Schema: .f at 0x7fa1ffc6a5c0> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'threshold_behavior': 'greater_than', 'stats_result': 'min_value', 'reverse': True} -.Schema: .f at 0x7f07023868e0> +.Schema: .f at 0x7fa1ffc6a8e0> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 
'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -24222,7 +24258,7 @@ /build/reproducible-path/elasticsearch-curator-5.8.1/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="? 'to' if job is 'add' else 'from', ---------------------------------------------------------------------- -Ran 529 tests in 13.870s +Ran 529 tests in 14.914s OK (SKIP=3) create-stamp debian/debhelper-build-stamp @@ -24412,97 +24448,97 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/actions.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/config_utils.py 
-> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/actions.py to actions.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/settings.py to settings.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/exceptions.py to exceptions.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/indexlist.py to indexlist.cpython-312.pyc /usr/lib/python3.12/dist-packages/curator/indexlist.py:882: SyntaxWarning: invalid escape sequence '\d' -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to curator_cli.cpython-312.pyc -byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py to __main__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/actions.py to actions.cpython-312.pyc /usr/lib/python3.12/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with 'str' literal. Did you mean "=="? -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/actions.py to actions.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py to __main__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/show.py to show.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to 
replicas.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/delete.py to delete.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/restore.py to restore.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/close.py to close.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/delete.py to delete.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-312.pyc +byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/show.py to show.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to curator_cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc running install_egg_info Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/elasticsearch_curator-5.8.1.egg-info Skipping SOURCES.txt @@ -24685,96 +24721,96 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +creating 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/settings.py to settings.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
-/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
+/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/restore.py to restore.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/close.py to close.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/utils.py to utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
running install_egg_info
Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/elasticsearch_curator-5.8.1.egg-info
Skipping SOURCES.txt
@@ -24813,12 +24849,14 @@
dpkg-buildpackage: info: binary-only upload (no source included)
dpkg-genchanges: info: not including original source code in upload
I: copying local configuration
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/374002/tmp/hooks/B01_cleanup finished
I: unmounting dev/ptmx filesystem
I: unmounting dev/pts filesystem
I: unmounting dev/shm filesystem
I: unmounting proc filesystem
I: unmounting sys filesystem
I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/2930692 and its subdirectories
-I: Current time: Sun Jun 29 04:59:04 -12 2025
-I: pbuilder-time-stamp: 1751216344
+I: removing directory /srv/workspace/pbuilder/374002 and its subdirectories
+I: Current time: Tue May 28 00:40:01 +14 2024
+I: pbuilder-time-stamp: 1716806401