Diff of the two buildlogs:
--
--- b1/build.log	2024-05-07 12:45:47.921068897 +0000
+++ b2/build.log	2024-05-07 12:51:22.074842339 +0000
@@ -1,6 +1,6 @@
 I: pbuilder: network access will be disabled during build
-I: Current time: Mon Jun 9 07:05:23 -12 2025
-I: pbuilder-time-stamp: 1749495923
+I: Current time: Wed May 8 02:45:51 +14 2024
+I: pbuilder-time-stamp: 1715085951
 I: Building the build Environment
 I: extracting base tarball [/var/cache/pbuilder/unstable-reproducible-base.tgz]
 I: copying local configuration
@@ -32,52 +32,84 @@
 dpkg-source: info: applying 0001-Version-bump-to-pyyaml-5.4.1-1596.patch
 I: using fakeroot in build.
 I: Installing the build-deps
-I: user script /srv/workspace/pbuilder/3315162/tmp/hooks/D02_print_environment starting
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/D01_modify_environment starting
+debug: Running on ionos1-amd64.
+I: Changing host+domainname to test build reproducibility
+I: Adding a custom variable just for the fun of it...
+I: Changing /bin/sh to bash
+'/bin/sh' -> '/bin/bash'
+lrwxrwxrwx 1 root root 9 May 7 12:46 /bin/sh -> /bin/bash
+I: Setting pbuilder2's login shell to /bin/bash
+I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/D01_modify_environment finished
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/D02_print_environment starting
 I: set
- BUILDDIR='/build/reproducible-path'
- BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other'
- BUILDUSERNAME='pbuilder1'
- BUILD_ARCH='amd64'
- DEBIAN_FRONTEND='noninteractive'
- DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=42 '
- DISTRIBUTION='unstable'
- HOME='/root'
- HOST_ARCH='amd64'
+ BASH=/bin/sh
+ BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath
+ BASH_ALIASES=()
+ BASH_ARGC=()
+ BASH_ARGV=()
+ BASH_CMDS=()
+ BASH_LINENO=([0]="12" [1]="0")
+ BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:.
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment")
+ BASH_VERSINFO=([0]="5" [1]="2" [2]="21" [3]="1" [4]="release" [5]="x86_64-pc-linux-gnu")
+ BASH_VERSION='5.2.21(1)-release'
+ BUILDDIR=/build/reproducible-path
+ BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other'
+ BUILDUSERNAME=pbuilder2
+ BUILD_ARCH=amd64
+ DEBIAN_FRONTEND=noninteractive
+ DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=20 '
+ DIRSTACK=()
+ DISTRIBUTION=unstable
+ EUID=0
+ FUNCNAME=([0]="Echo" [1]="main")
+ GROUPS=()
+ HOME=/root
+ HOSTNAME=i-capture-the-hostname
+ HOSTTYPE=x86_64
+ HOST_ARCH=amd64
 IFS=' '
- INVOCATION_ID='e51f78bb76774f61b14188eff3fb084d'
- LANG='C'
- LANGUAGE='en_US:en'
- LC_ALL='C'
- MAIL='/var/mail/root'
- OPTIND='1'
- PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games'
- PBCURRENTCOMMANDLINEOPERATION='build'
- PBUILDER_OPERATION='build'
- PBUILDER_PKGDATADIR='/usr/share/pbuilder'
- PBUILDER_PKGLIBDIR='/usr/lib/pbuilder'
- PBUILDER_SYSCONFDIR='/etc'
- PPID='3315162'
- PS1='# '
- PS2='> '
+ INVOCATION_ID=5fce0a2bc20c45d6a0524e868413494e
+ LANG=C
+ LANGUAGE=et_EE:et
+ LC_ALL=C
+ MACHTYPE=x86_64-pc-linux-gnu
+ MAIL=/var/mail/root
+ OPTERR=1
+ OPTIND=1
+ OSTYPE=linux-gnu
+ PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path
+ PBCURRENTCOMMANDLINEOPERATION=build
+ PBUILDER_OPERATION=build
+ PBUILDER_PKGDATADIR=/usr/share/pbuilder
+ PBUILDER_PKGLIBDIR=/usr/lib/pbuilder
+ PBUILDER_SYSCONFDIR=/etc
+ PIPESTATUS=([0]="0")
+ POSIXLY_CORRECT=y
+ PPID=3740557
 PS4='+ '
- PWD='/'
- SHELL='/bin/bash'
- SHLVL='2'
- SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.k1mahlHs/pbuilderrc_dGou --distribution unstable --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.k1mahlHs/b1 --logfile b1/build.log elasticsearch-curator_5.8.1-4.1.dsc'
- SUDO_GID='110'
- SUDO_UID='105'
- SUDO_USER='jenkins'
- TERM='unknown'
- TZ='/usr/share/zoneinfo/Etc/GMT+12'
- USER='root'
- _='/usr/bin/systemd-run'
- http_proxy='http://213.165.73.152:3128'
+ PWD=/
+ SHELL=/bin/bash
+ SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix
+ SHLVL=3
+ SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.k1mahlHs/pbuilderrc_l86S --distribution unstable --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/unstable-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.k1mahlHs/b2 --logfile b2/build.log elasticsearch-curator_5.8.1-4.1.dsc'
+ SUDO_GID=110
+ SUDO_UID=105
+ SUDO_USER=jenkins
+ TERM=unknown
+ TZ=/usr/share/zoneinfo/Etc/GMT-14
+ UID=0
+ USER=root
+ _='I: set'
+ http_proxy=http://46.16.76.132:3128
 I: uname -a
- Linux ionos5-amd64 6.6.13+bpo-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.6.13-1~bpo12+1 (2024-02-15) x86_64 GNU/Linux
+ Linux i-capture-the-hostname 6.1.0-21-amd64 #1 SMP PREEMPT_DYNAMIC Debian 6.1.90-1 (2024-05-03) x86_64 GNU/Linux
 I: ls -l /bin
- lrwxrwxrwx 1 root root 7 Jun 9 14:05 /bin -> usr/bin
-I: user script /srv/workspace/pbuilder/3315162/tmp/hooks/D02_print_environment finished
+ lrwxrwxrwx 1 root root 7 May 7 07:43 /bin -> usr/bin
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/D02_print_environment finished
 -> Attempting to satisfy build-dependencies
 -> Creating pbuilder-satisfydepends-dummy package
 Package: pbuilder-satisfydepends-dummy
@@ -275,7 +307,7 @@
 Get: 114 http://deb.debian.org/debian unstable/main amd64 python3-voluptuous all 0.14.2-1 [45.8 kB]
 Get: 115 http://deb.debian.org/debian unstable/main amd64 python3-yaml amd64 6.0.1-2 [177 kB]
 Get: 116 http://deb.debian.org/debian unstable/main amd64 w3m amd64 0.5.3+git20230121-2+b3 [1106 kB]
-Fetched 71.2 MB in 1s (52.4 MB/s)
+Fetched 71.2 MB in 4s (17.1 MB/s)
 debconf: delaying package configuration, since apt-utils is not installed
 Selecting previously unselected package fonts-lato.
 (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19699 files and directories currently installed.)
@@ -656,8 +688,8 @@
 Setting up tzdata (2024a-4) ...
 Current default time zone: 'Etc/UTC'
-Local time is now: Mon Jun 9 19:06:56 UTC 2025.
-Universal Time is now: Mon Jun 9 19:06:56 UTC 2025.
+Local time is now: Tue May 7 12:48:33 UTC 2024.
+Universal Time is now: Tue May 7 12:48:33 UTC 2024.
 Run 'dpkg-reconfigure tzdata' if you wish to change it.
 Setting up autotools-dev (20220109.1) ...
@@ -795,7 +827,11 @@
 fakeroot is already the newest version (1.34-1).
 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded.
 I: Building the package
-I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/A99_set_merged_usr starting
+Not re-configuring usrmerge for unstable
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/A99_set_merged_usr finished
+hostname: Name or service not known
+I: Running cd /build/reproducible-path/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes
 dpkg-buildpackage: info: source package elasticsearch-curator
 dpkg-buildpackage: info: source version 5.8.1-4.1
 dpkg-buildpackage: info: source distribution unstable
@@ -860,20 +896,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
-copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
+copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator
 running egg_info
 creating elasticsearch_curator.egg-info
 writing elasticsearch_curator.egg-info/PKG-INFO
@@ -1068,20 +1104,20 @@
 running build
 running build_py
 creating /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
+copying curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
 copying curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator
-copying curator/cli.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator running egg_info writing elasticsearch_curator.egg-info/PKG-INFO writing dependency_links to elasticsearch_curator.egg-info/dependency_links.txt @@ -1374,7 +1410,7 @@ /build/reproducible-path/elasticsearch-curator-5.8.1/test/integration/test_count_pattern.py:78: SyntaxWarning: invalid escape sequence '\d' '\'^(a|b)-\d{4}\.\d{2}\.\d{2}$\'', 'true', 'name', '\'%Y.%m.%d\'', 'true', 1 Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1428,10 +1464,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1485,7 +1521,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1542,10 +1578,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn 
raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1599,10 +1635,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1656,10 +1692,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1713,10 +1749,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1770,10 +1806,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1827,7 +1863,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: 
: Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1884,10 +1920,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1941,7 +1977,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1998,7 +2034,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2055,7 +2091,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2112,10 +2148,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2169,10 +2205,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): 
localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2226,10 +2262,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2283,10 +2319,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2340,7 +2376,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2397,10 +2433,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2454,7 +2490,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2511,7 +2547,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2568,7 +2604,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2625,7 +2661,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2682,7 +2718,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2739,7 +2775,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2796,7 +2832,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2853,7 +2889,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -2910,10 +2946,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > 
None Starting new HTTP connection (28): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2967,7 +3003,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3024,7 +3060,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3081,7 +3117,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3138,7 +3174,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3195,10 +3231,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3252,7 +3288,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3309,7 +3345,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in 
_new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3366,10 +3402,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3423,10 +3459,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3480,7 +3516,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3537,10 +3573,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3594,7 +3630,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3651,10 +3687,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in 
_new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3708,10 +3744,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3765,10 +3801,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3822,10 +3858,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3879,10 +3915,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3936,10 +3972,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3993,10 +4029,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4050,10 +4086,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4107,7 +4143,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (49): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -4164,10 +4200,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4221,7 +4257,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): 
localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -4278,10 +4314,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4335,10 +4371,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4392,10 +4428,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4449,10 +4485,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4506,10 +4542,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, 
Executable @@ -4563,10 +4599,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4620,10 +4656,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4677,10 +4713,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4734,10 +4770,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4791,10 +4827,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4848,10 +4884,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4905,10 +4941,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4962,10 +4998,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5019,10 +5055,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5076,10 +5112,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5133,10 +5169,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", 
line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5190,10 +5226,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5247,10 +5283,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5304,10 +5340,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5361,10 +5397,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5418,10 +5454,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5475,10 +5511,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5532,10 +5568,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5589,10 +5625,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5646,10 +5682,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5703,10 +5739,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5760,10 +5796,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5817,10 +5853,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5874,10 +5910,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5931,10 +5967,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5988,10 +6024,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
+urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6045,10 +6081,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6102,10 +6138,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6159,10 +6195,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6216,10 +6252,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6273,10 +6309,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to 
establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6330,10 +6366,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6387,10 +6423,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6444,10 +6480,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6501,10 +6537,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6558,10 +6594,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused 
> None Starting new HTTP connection (92): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6615,10 +6651,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6672,10 +6708,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6729,10 +6765,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6786,10 +6822,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6843,10 +6879,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 
-GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6900,10 +6936,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6957,10 +6993,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7014,10 +7050,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -7071,7 +7107,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -8427,7 +8463,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 
'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -8463,7 +8499,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -9779,9 +9815,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999839) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9834,9 +9870,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999839) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9963,9 +9999,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. 
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999839) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9986,9 +10022,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999839) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -10032,9 +10068,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999839) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -10055,9 +10091,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999839) Index index-2016.03.04 is actionable and remains in the list. 
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409695) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999839) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -11376,7 +11412,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 
'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -11409,7 +11445,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , 
msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -11441,7 +11477,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 
'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -11470,7 +11506,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 
'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -11505,7 +11541,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 
'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11533,7 +11569,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11564,7 +11600,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must 
be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11584,7 +11620,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), 
msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -11632,7 +11668,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 
'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -11652,7 +11688,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. 
@@ -11676,7 +11712,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -11710,7 +11746,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 
'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -11747,7 +11783,7 @@ All filters: [{'filtertype': 'space', 
'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 
'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -11794,7 +11830,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema 
error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11843,30 +11879,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 
'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1749409695) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1714999840) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1749409695) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1714999840) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': 
Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 
'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -11875,14 +11911,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': 
Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, 
msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -11893,7 +11929,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , 
msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -11911,11 +11947,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1749409695) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1714999840) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1749409695) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1714999840) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -11925,11 +11961,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1714999840) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1714999840) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -11939,14 +11975,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1749409695) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1714999840) .Repository repo_name exists. 
Starting filter_by_age Point of Reference: 1456963200 @@ -11956,11 +11992,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1749409695) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1714999840) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -11970,13 +12006,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409695 +Point of Reference: 1714999840 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -12225,7 +12261,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.004s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12256,7 +12292,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12286,7 +12322,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12299,7 +12335,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12312,9 +12348,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", 
line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -12322,7 +12358,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.001s] +GET https://127.0.0.1:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12352,7 +12388,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12382,7 +12418,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12395,7 +12431,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12408,9 +12444,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) 
-requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. ..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12422,7 +12458,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12452,7 +12488,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12465,7 +12501,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12478,9 +12514,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new 
connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12488,7 +12524,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12518,7 +12554,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12531,7 +12567,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12544,11 +12580,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a 
new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. -.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -12623,7 +12659,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -12641,93 +12677,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
Result: False -......Schema: .f at 0x7ff2759c8ea0> +......Schema: .f at 0x7f3c498f0ea0> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'exclude': False, 'epoch': None, 'timestring': None} -.Schema: .f at 0x7ff2759c9080> +Filter #0: {'filtertype': 'age', 
'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'exclude': False, 'timestring': None} +.Schema: .f at 0x7f3c498f1080> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 
'min_value', 'exclude': False, 'epoch': None, 'timestring': None} -.Schema: .f at 0x7ff2759c8ea0> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'exclude': False, 'timestring': None} +.Schema: .f at 0x7f3c498f0ea0> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0x7ff2759c8ea0> +.Schema: .f at 0x7f3c498f0ea0> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0x7ff2759b1800> +.Schema: .f at 0x7f3c498d9800> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 
'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0x7ff2759b0d60> +.Schema: .f at 0x7f3c498d8d60> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'use_age': False, 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0x7ff2759b1800> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'use_age': False, 'timestring': None} +.Schema: .f at 0x7f3c498d9800> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0x7ff2759b2020> +.Schema: .f at 0x7f3c498da020> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0x7ff2759b1800> +.Schema: .f at 0x7f3c498d9800> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0x7ff2759b2020> +.Schema: .f at 0x7f3c498da020> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0x7ff2759b1800> +.Schema: .f at 0x7f3c498d9800> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 
'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'timestring': None, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7ff2759b2020> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True, 'timestring': None} +.Schema: .f at 0x7f3c498da020> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'timestring': None, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7ff2759077e0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True, 'timestring': None} +.Schema: .f at 0x7f3c4982f7e0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
-.Schema: .f at 0x7ff275906b60> +.Schema: .f at 0x7f3c4982e840> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7ff275906840> +.Schema: .f at 0x7f3c4982fce0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0x7ff275906b60> +.Schema: .f at 0x7f3c4982e840> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7ff2759b39c0> +.Schema: .f at 0x7f3c498db9c0> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 
'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -12768,7 +12804,7 @@ """ :882: SyntaxWarning: invalid escape sequence '\d' ---------------------------------------------------------------------- -Ran 529 tests in 14.066s +Ran 529 tests in 15.284s OK (SKIP=3) I: pybuild base:311: python3.11 setup.py test @@ -12840,7 +12876,7 @@ writing manifest file 'elasticsearch_curator.egg-info/SOURCES.txt' running build_ext Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12894,10 +12930,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12951,10 +12987,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13008,10 +13044,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET 
http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13065,10 +13101,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13122,10 +13158,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13179,10 +13215,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13236,10 +13272,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13293,10 +13329,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] 
+GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13350,10 +13386,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13407,10 +13443,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13464,10 +13500,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13521,10 +13557,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13578,10 +13614,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A 
request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13635,10 +13671,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13692,10 +13728,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13749,10 +13785,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13806,10 +13842,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -13863,7 +13899,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -13920,7 +13956,7 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -13977,10 +14013,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14034,10 +14070,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14091,10 +14127,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14148,10 +14184,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14205,10 +14241,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : 
Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14262,10 +14298,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14319,10 +14355,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14376,7 +14412,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -14433,7 +14469,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -14490,10 +14526,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14547,10 +14583,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14604,7 +14640,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -14661,10 +14697,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14718,10 +14754,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14775,10 +14811,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14832,10 +14868,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed 
to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14889,10 +14925,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -14946,10 +14982,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15003,10 +15039,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15060,10 +15096,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15117,10 +15153,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] 
Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15174,10 +15210,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15231,10 +15267,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15288,10 +15324,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15345,10 +15381,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15402,10 +15438,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP 
connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15459,10 +15495,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15516,10 +15552,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15573,10 +15609,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15630,10 +15666,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15687,10 +15723,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET 
http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15744,10 +15780,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15801,10 +15837,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15858,10 +15894,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15915,10 +15951,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -15972,10 +16008,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A 
request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16029,10 +16065,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16086,10 +16122,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16143,10 +16179,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16200,10 +16236,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16257,10 +16293,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ 
[status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16314,10 +16350,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16371,10 +16407,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16428,10 +16464,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16485,10 +16521,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16542,10 +16578,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most 
recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16599,10 +16635,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16656,10 +16692,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16713,10 +16749,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16770,10 +16806,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16827,10 +16863,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File 
"/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16884,10 +16920,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16941,10 +16977,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -16998,10 +17034,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17055,7 +17091,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17112,7 +17148,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17169,7 +17205,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection 
refused > None Starting new HTTP connection (77): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17226,7 +17262,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17283,7 +17319,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17340,7 +17376,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17397,7 +17433,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17454,7 +17490,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17511,10 +17547,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17568,10 +17604,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : 
Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17625,10 +17661,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17682,7 +17718,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17739,10 +17775,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17796,7 +17832,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.001s] @@ -17853,10 +17889,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17910,10 +17946,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -17967,10 +18003,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18024,10 +18060,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18081,10 +18117,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18138,10 +18174,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18195,10 +18231,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18252,10 +18288,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18309,10 +18345,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18366,10 +18402,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18423,10 +18459,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18480,10 +18516,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -18537,7 +18573,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -19891,7 +19927,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -19927,7 +19963,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -21243,9 +21279,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. 
-Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999861) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -21298,9 +21334,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999861) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -21427,9 +21463,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999861) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -21450,9 +21486,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1714999861) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -21496,9 +21532,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. 
-Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999861) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409712) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -21519,9 +21555,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1714999861) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1749409712) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1714999861) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -22840,7 +22876,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, 
msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -22873,7 +22909,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 
'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -22905,7 +22941,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 
'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -22934,7 +22970,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -22969,7 +23005,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 
'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -22997,7 +23033,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, 
msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -23028,7 +23064,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , 
msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23048,7 +23084,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 
'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -23096,7 +23132,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), 
None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -23116,7 +23152,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 
'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -23140,7 +23176,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, 
msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -23174,7 +23210,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 
'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -23211,7 +23247,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 
'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -23258,7 +23294,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 
'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -23307,30 +23343,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, 
msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1749409713) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1714999861) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1749409713) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1714999861) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. 
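The age-filter lines above compare each index or snapshot age (an epoch timestamp) against a "point of reference" epoch taken at test run time, which is why those reference values differ between the two builds while the item ages stay fixed. A minimal sketch of the comparison these log lines record (illustrative only; the function name and signature are assumptions, not Curator's actual code):

def is_actionable(item_epoch, point_of_reference, direction):
    # 'older' keeps items whose timestamp falls before the reference;
    # 'younger' keeps items whose timestamp falls after it.
    if direction == 'older':
        return item_epoch < point_of_reference
    return item_epoch > point_of_reference

# e.g. snapshot "snap_name", age 1422748800, reference 1714999861, direction 'older':
# is_actionable(1422748800, 1714999861, 'older') -> True, so it remains in the actionable list.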
All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -23339,14 +23375,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. 
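The "Schema error" lines above are produced by validating each "filter" config block against the voluptuous schema dumped in the preceding Schema: lines. A reduced sketch of how those two kinds of error arise (an assumed, cut-down schema covering only the filtertype key, not the package's actual schema definition):

from voluptuous import Any, Coerce, In, MultipleInvalid, Schema

filtertypes = ['age', 'alias', 'allocated', 'closed', 'count', 'empty',
               'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern',
               'period', 'shards', 'space', 'state']
schema = Schema({
    'filtertype': Any(In(filtertypes),
                      msg='filtertype must be one of %s' % filtertypes),
    'unit_count': Coerce(int),
})

for config in ({'filtertype': 12345.6789}, {'no_filtertype': 'fail'}):
    try:
        schema(config)
    except MultipleInvalid as err:
        # An unknown key reports "extra keys not allowed @ data['no_filtertype']";
        # a bad filtertype reports the custom msg, matching the log output above.
        print('Schema error:', err)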
All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -23357,7 +23393,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 
'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -23375,11 +23411,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 Snapshot snap_name is actionable and remains in the list. 
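Earlier in this hunk the pattern filter's config is reduced to Filter args: {'kind': 'prefix', 'value': 'sna'}, i.e. the snapshot list is about to be narrowed to names starting with "sna", before the age-filter output resumes. A plausible sketch of how such a (kind, value) pair could become a regex; build_pattern is a hypothetical helper, not Curator's actual function:

    import re

    def build_pattern(kind, value):
        # 'prefix' and 'suffix' are anchored; 'regex' is taken verbatim.
        if kind == 'prefix':
            return r'^{0}.*$'.format(re.escape(value))
        if kind == 'suffix':
            return r'^.*{0}$'.format(re.escape(value))
        if kind == 'regex':
            return value
        raise ValueError('unknown kind: {0}'.format(kind))

    snapshots = ['snap_name', 'snapshot-2015.03.01', 'other']
    pattern = re.compile(build_pattern('prefix', 'sna'))
    print([s for s in snapshots if pattern.match(s)])
    # ['snap_name', 'snapshot-2015.03.01']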
-Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1749409713) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1714999861) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1749409713) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1714999861) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -23389,11 +23425,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1749409713) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1714999861) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1749409713) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1714999861) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -23403,14 +23439,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1749409713) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1714999861) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -23420,11 +23456,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1749409713) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1714999861) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -23434,13 +23470,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. 
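The filter_by_age lines above reduce to a single comparison per snapshot: its creation epoch against a point of reference, with "older" keeping timestamps below the reference and "younger" keeping those above it. The reference 1714999861 differs from the first build's 1749409713 only because it is derived from the wall clock at test time, which is why these otherwise identical lines show up in the diff at all. A small sketch of that decision, under the assumption that this is the rule behind the Remains/Removed messages:

    def actionable(age, direction, point_of_reference):
        """Keep a snapshot whose creation epoch falls on the requested side
        of the point of reference (all values are epoch seconds)."""
        if direction == 'older':
            return age < point_of_reference
        if direction == 'younger':
            return age > point_of_reference
        raise ValueError('direction must be "older" or "younger"')

    snapshots = {'snap_name': 1422748800, 'snapshot-2015.03.01': 1425168002}
    for direction, por in (('older', 1714999861), ('older', 1425168001),
                           ('younger', 1714999861), ('younger', 1422748801)):
        for name, age in snapshots.items():
            verdict = 'Remains in' if actionable(age, direction, por) else 'Removed from'
            print('{0} actionable list: Snapshot "{1}" age ({2}), direction: '
                  '"{3}", point of reference, ({4})'.format(verdict, name, age,
                                                            direction, por))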
Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1749409713 +Point of Reference: 1714999861 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -23689,7 +23725,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.004s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23720,7 +23756,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23750,7 +23786,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23763,7 +23799,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23776,9 +23812,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': 
Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -23786,7 +23822,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.002s] +GET https://127.0.0.1:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23816,7 +23852,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23846,7 +23882,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23859,7 +23895,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23872,9 +23908,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator 
cannot proceed. Exiting. ..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23886,7 +23922,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23916,7 +23952,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23929,7 +23965,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -23942,9 +23978,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
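Each of the connection-failure tracebacks above is really three chained exceptions: urllib3's NewConnectionError is caught while retrying, which raises MaxRetryError, which requests in turn converts into ProxyError; the repeated "During handling of the above exception, another exception occurred:" banners come from Python's implicit exception chaining. A self-contained illustration with stand-in exception classes (these shadow the real urllib3/requests names only for the demo):

    import traceback

    class NewConnectionError(Exception): pass
    class MaxRetryError(Exception): pass
    class ProxyError(Exception): pass

    def connect():
        raise NewConnectionError('[Errno 111] Connection refused')

    def urlopen_with_retries():
        try:
            connect()
        except NewConnectionError:
            # raising inside an except block records the original error on
            # __context__, which is what produces the "During handling..." banner
            raise MaxRetryError('Max retries exceeded with url: /')

    try:
        try:
            urlopen_with_retries()
        except MaxRetryError:
            raise ProxyError('Cannot connect to proxy.')
    except ProxyError:
        traceback.print_exc()   # prints all three tracebacks, joined by the banner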
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -23952,7 +23988,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.003s] +GET https://127.0.0.1:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/reproducible-path/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -23982,7 +24018,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -23995,7 +24031,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -24008,11 +24044,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 513, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
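The same failure mode is easy to reproduce outside the test suite: the build environment exports an http_proxy while network access is disabled, so the very first hop (the proxy itself) is refused. A sketch, assuming requests is available and using a deliberately closed local port as the stand-in proxy:

    import requests

    # 127.0.0.1:1 stands in for an unreachable proxy; in the build the real
    # proxy from $http_proxy is unreachable because network access is disabled.
    proxies = {'https': 'http://127.0.0.1:1'}
    try:
        requests.get('https://127.0.0.1:9200/', proxies=proxies, timeout=3)
    except requests.exceptions.RequestException as err:
        # here err is a ProxyError wrapping MaxRetryError wrapping
        # NewConnectionError('[Errno 111] Connection refused')
        print('HTTP N/A error:', err)
        print('Curator cannot proceed. Exiting.')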
-.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -24087,7 +24123,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -24105,93 +24141,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
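The only substantive difference in the reindex-task hunk above is the reported completion time, 2017-03-16T08:53:39Z in the first build versus 2017-03-17T10:53:39Z in the second: 26 hours apart, exactly the gap between the two builds' time zones (Etc/GMT+12 vs Etc/GMT-14). Since start_time_in_millis and running_time_in_nanos are identical in both TASK_DATA dumps, the discrepancy is most plausibly a local-time rendering of the computed completion instant with a hard-coded "Z" suffix. A sketch of that computation under that assumption:

    import datetime, os, time

    start_ms = 1489695981997      # TASK_DATA['task']['start_time_in_millis']
    running_ns = 1637039537721    # TASK_DATA['task']['running_time_in_nanos']
    completed = start_ms / 1000.0 + running_ns / 1e9

    # Timezone-independent rendering: 2017-03-16T20:53:39Z in every build.
    utc = datetime.datetime.fromtimestamp(completed, tz=datetime.timezone.utc)
    print(utc.strftime('%Y-%m-%dT%H:%M:%SZ'))

    # Local-time rendering reproduces both sides of the diff (POSIX only).
    for tz in ('Etc/GMT+12', 'Etc/GMT-14'):
        os.environ['TZ'] = tz
        time.tzset()
        print(tz, time.strftime('%Y-%m-%dT%H:%M:%SZ', time.localtime(completed)))

Formatting the instant in UTC (or including the real offset instead of a literal "Z") would make this log line reproducible regardless of the build host's TZ setting.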
Result: False -......Schema: .f at 0x7f6c3115b7e0> +......Schema: .f at 0x7f70757637e0> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 'epoch': None, 'stats_result': 'min_value', 'timestring': None} -.Schema: .f at 0x7f6c3115ac00> +Filter #0: {'filtertype': 'age', 
'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} +.Schema: .f at 0x7f7075762c00> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 
'epoch': None, 'stats_result': 'min_value', 'timestring': None} -.Schema: .f at 0x7f6c31158f40> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'stats_result': 'min_value', 'timestring': None, 'exclude': False} +.Schema: .f at 0x7f7075760f40> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0x7f6c3115b420> +.Schema: .f at 0x7f7075763420> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0x7f6c31158f40> +.Schema: .f at 0x7f7075760f40> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 
'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0x7f6c3115be20> +.Schema: .f at 0x7f7075763e20> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'use_age': False, 'timestring': None} -.Schema: .f at 0x7f6c3115be20> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'stats_result': 'min_value', 'timestring': None, 'use_age': False} +.Schema: .f at 0x7f7075763e20> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 
'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0x7f6c31321760> +.Schema: .f at 0x7f707592b740> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0x7f6c3115be20> +.Schema: .f at 0x7f7075763e20> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0x7f6c31159620> +.Schema: .f at 0x7f7075761620> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0x7f6c311589a0> +.Schema: .f at 0x7f70757609a0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 
'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'timestring': None, 'reverse': True} -.Schema: .f at 0x7f6c31159620> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'timestring': None} +.Schema: .f at 0x7f7075761620> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'timestring': None, 'reverse': True} -.Schema: .f at 0x7f6c31321440> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'timestring': None} +.Schema: .f at 0x7f7075929440> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
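The "required key not provided @ data['field']" failure above (and the matching data['timestring'] failure a little further on) shows that which keys are mandatory depends on the source value: field_stats needs a field, name needs a timestring. A trimmed sketch of building such a per-filter schema with voluptuous; space_filter_schema is a hypothetical helper, not Curator's actual builder:

    from voluptuous import Any, Coerce, Invalid, Optional, Required, Schema

    def space_filter_schema(config):
        schema = {
            Required('filtertype'): 'space',
            Required('disk_space'): Coerce(float),
            Optional('use_age', default=False): bool,
            Optional('exclude', default=False): bool,
            Optional('source', default='creation_date'):
                Any('name', 'creation_date', 'field_stats'),
        }
        # Which extra keys become Required depends on how the age is sourced.
        if config.get('use_age'):
            if config.get('source') == 'field_stats':
                schema[Required('field')] = str
            elif config.get('source') == 'name':
                schema[Required('timestring')] = str
        return Schema(schema)

    bad = {'filtertype': 'space', 'disk_space': 1, 'use_age': True,
           'exclude': False, 'source': 'field_stats'}      # no 'field'
    try:
        space_filter_schema(bad)(bad)
    except Invalid as err:
        print('Schema error:', err)   # required key not provided @ data['field']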
-.Schema: .f at 0x7f6c31321b20> +.Schema: .f at 0x7f7075929b20> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7f6c313228e0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than'} +.Schema: .f at 0x7f707592a8e0> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 
'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0x7f6c313225c0> +.Schema: .f at 0x7f707592a5c0> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} -.Schema: .f at 0x7f6c313228e0> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 
'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'stats_result': 'min_value', 'threshold_behavior': 'greater_than'} +.Schema: .f at 0x7f707592a8e0> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -24227,7 +24263,7 @@ /build/reproducible-path/elasticsearch-curator-5.8.1/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="? 'to' if job is 'add' else 'from', ---------------------------------------------------------------------- -Ran 529 tests in 13.729s +Ran 529 tests in 15.343s OK (SKIP=3) create-stamp debian/debhelper-build-stamp @@ -24417,97 +24453,97 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/validators/options.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons 
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.12_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/actions.py to 
actions.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/__init__.py to __init__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/settings.py to settings.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/exceptions.py to exceptions.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/indexlist.py to indexlist.cpython-312.pyc /usr/lib/python3.12/dist-packages/curator/indexlist.py:882: SyntaxWarning: invalid escape sequence '\d' -byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/config_utils.py to config_utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to curator_cli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/logtools.py to logtools.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py to __main__.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/actions.py to actions.cpython-312.pyc /usr/lib/python3.12/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with 'str' literal. Did you mean "=="? -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/actions.py to actions.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/filters.py to filters.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/config_file.py to config_file.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/validators/options.py to options.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/singletons.py to singletons.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__init__.py to __init__.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/__main__.py to __main__.cpython-312.pyc 
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/show.py to show.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/delete.py to delete.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/restore.py to restore.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/close.py to close.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc -byte-compiling 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/delete.py to delete.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-312.pyc byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/utils.py to utils.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/alias.py to alias.cpython-312.pyc -byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/show.py to show.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/cli.py to cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/utils.py to utils.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/curator_cli.py to curator_cli.cpython-312.pyc +byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/curator/_version.py to _version.cpython-312.pyc running install_egg_info Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.12/dist-packages/elasticsearch_curator-5.8.1.egg-info Skipping SOURCES.txt @@ -24690,96 +24726,96 @@ creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11 creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages creating 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/settings.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying 
/build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator creating /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> 
/build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/reproducible-path/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/settings.py to settings.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
-/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
+/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/restore.py to restore.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/close.py to close.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/utils.py to utils.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
-byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
+byte-compiling /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
running install_egg_info
Copying elasticsearch_curator.egg-info to /build/reproducible-path/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/elasticsearch_curator-5.8.1.egg-info
Skipping SOURCES.txt
@@ -24809,8 +24845,8 @@
dh_md5sums -O--buildsystem=pybuild
dh_builddeb -O--buildsystem=pybuild
dpkg-deb: building package 'elasticsearch-curator' in '../elasticsearch-curator_5.8.1-4.1_all.deb'.
-dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
+dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
dpkg-genbuildinfo --build=binary -O../elasticsearch-curator_5.8.1-4.1_amd64.buildinfo
dpkg-genchanges --build=binary -O../elasticsearch-curator_5.8.1-4.1_amd64.changes
dpkg-genchanges: info: binary-only upload (no source code included)
@@ -24818,12 +24854,14 @@
dpkg-buildpackage: info: binary-only upload (no source included)
dpkg-genchanges: info: not including original source code in upload
I: copying local configuration
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/3740557/tmp/hooks/B01_cleanup finished
I: unmounting dev/ptmx filesystem
I: unmounting dev/pts filesystem
I: unmounting dev/shm filesystem
I: unmounting proc filesystem
I: unmounting sys filesystem
I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/3315162 and its subdirectories
-I: Current time: Mon Jun 9 07:08:46 -12 2025
-I: pbuilder-time-stamp: 1749496126
+I: removing directory /srv/workspace/pbuilder/3740557 and its subdirectories
+I: Current time: Wed May 8 02:51:21 +14 2024
+I: pbuilder-time-stamp: 1715086281