Diff of the two buildlogs: -- --- b1/build.log 2023-05-15 19:07:01.488088618 +0000 +++ b2/build.log 2023-05-15 19:21:22.150356115 +0000 @@ -1,6 +1,6 @@ I: pbuilder: network access will be disabled during build -I: Current time: Mon May 15 07:03:26 -12 2023 -I: pbuilder-time-stamp: 1684177406 +I: Current time: Tue May 16 09:07:11 +14 2023 +I: pbuilder-time-stamp: 1684177631 I: Building the build Environment I: extracting base tarball [/var/cache/pbuilder/bookworm-reproducible-base.tgz] I: copying local configuration @@ -16,7 +16,7 @@ I: copying [./elasticsearch-curator_5.8.1.orig.tar.gz] I: copying [./elasticsearch-curator_5.8.1-4.1.debian.tar.xz] I: Extracting source -gpgv: Signature made Sat Mar 4 04:21:34 2023 -12 +gpgv: Signature made Sun Mar 5 06:21:34 2023 +14 gpgv: using RSA key 3AFA757FAC6EA11D2FF45DF088D24287A2D898B1 gpgv: Can't check signature: No public key dpkg-source: warning: cannot verify inline signature for ./elasticsearch-curator_5.8.1-4.1.dsc: no acceptable signature found @@ -32,135 +32,167 @@ dpkg-source: info: applying 0001-Version-bump-to-pyyaml-5.4.1-1596.patch I: using fakeroot in build. I: Installing the build-deps -I: user script /srv/workspace/pbuilder/22109/tmp/hooks/D02_print_environment starting +I: user script /srv/workspace/pbuilder/26870/tmp/hooks/D01_modify_environment starting +debug: Running on virt32a. +I: Changing host+domainname to test build reproducibility +I: Adding a custom variable just for the fun of it... +I: Changing /bin/sh to bash +'/bin/sh' -> '/bin/bash' +lrwxrwxrwx 1 root root 9 May 16 09:09 /bin/sh -> /bin/bash +I: Setting pbuilder2's login shell to /bin/bash +I: Setting pbuilder2's GECOS to second user,second room,second work-phone,second home-phone,second other +I: user script /srv/workspace/pbuilder/26870/tmp/hooks/D01_modify_environment finished +I: user script /srv/workspace/pbuilder/26870/tmp/hooks/D02_print_environment starting I: set - BUILDDIR='/build' - BUILDUSERGECOS='first user,first room,first work-phone,first home-phone,first other' - BUILDUSERNAME='pbuilder1' - BUILD_ARCH='armhf' - DEBIAN_FRONTEND='noninteractive' - DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=3' - DISTRIBUTION='bookworm' - HOME='/root' - HOST_ARCH='armhf' + BASH=/bin/sh + BASHOPTS=checkwinsize:cmdhist:complete_fullquote:extquote:force_fignore:globasciiranges:globskipdots:hostcomplete:interactive_comments:patsub_replacement:progcomp:promptvars:sourcepath + BASH_ALIASES=() + BASH_ARGC=() + BASH_ARGV=() + BASH_CMDS=() + BASH_LINENO=([0]="12" [1]="0") + BASH_LOADABLES_PATH=/usr/local/lib/bash:/usr/lib/bash:/opt/local/lib/bash:/usr/pkg/lib/bash:/opt/pkg/lib/bash:. 
+ BASH_SOURCE=([0]="/tmp/hooks/D02_print_environment" [1]="/tmp/hooks/D02_print_environment") + BASH_VERSINFO=([0]="5" [1]="2" [2]="15" [3]="1" [4]="release" [5]="arm-unknown-linux-gnueabihf") + BASH_VERSION='5.2.15(1)-release' + BUILDDIR=/build + BUILDUSERGECOS='second user,second room,second work-phone,second home-phone,second other' + BUILDUSERNAME=pbuilder2 + BUILD_ARCH=armhf + DEBIAN_FRONTEND=noninteractive + DEB_BUILD_OPTIONS='buildinfo=+all reproducible=+all parallel=4' + DIRSTACK=() + DISTRIBUTION=bookworm + EUID=0 + FUNCNAME=([0]="Echo" [1]="main") + GROUPS=() + HOME=/root + HOSTNAME=i-capture-the-hostname + HOSTTYPE=arm + HOST_ARCH=armhf IFS=' ' - INVOCATION_ID='6e32767555574dc88c255a69a0f04f27' - LANG='C' - LANGUAGE='en_US:en' - LC_ALL='C' - MAIL='/var/mail/root' - OPTIND='1' - PATH='/usr/sbin:/usr/bin:/sbin:/bin:/usr/games' - PBCURRENTCOMMANDLINEOPERATION='build' - PBUILDER_OPERATION='build' - PBUILDER_PKGDATADIR='/usr/share/pbuilder' - PBUILDER_PKGLIBDIR='/usr/lib/pbuilder' - PBUILDER_SYSCONFDIR='/etc' - PPID='22109' - PS1='# ' - PS2='> ' + INVOCATION_ID=3d588e6674334a97b7cfe08e7b64710c + LANG=C + LANGUAGE=it_CH:it + LC_ALL=C + MACHTYPE=arm-unknown-linux-gnueabihf + MAIL=/var/mail/root + OPTERR=1 + OPTIND=1 + OSTYPE=linux-gnueabihf + PATH=/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path + PBCURRENTCOMMANDLINEOPERATION=build + PBUILDER_OPERATION=build + PBUILDER_PKGDATADIR=/usr/share/pbuilder + PBUILDER_PKGLIBDIR=/usr/lib/pbuilder + PBUILDER_SYSCONFDIR=/etc + PIPESTATUS=([0]="0") + POSIXLY_CORRECT=y + PPID=26870 PS4='+ ' - PWD='/' - SHELL='/bin/bash' - SHLVL='2' - SUDO_COMMAND='/usr/bin/timeout -k 18.1h 18h /usr/bin/ionice -c 3 /usr/bin/nice /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.kB8C2ewA/pbuilderrc_dZa8 --distribution bookworm --hookdir /etc/pbuilder/first-build-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.kB8C2ewA/b1 --logfile b1/build.log elasticsearch-curator_5.8.1-4.1.dsc' - SUDO_GID='114' - SUDO_UID='108' - SUDO_USER='jenkins' - TERM='unknown' - TZ='/usr/share/zoneinfo/Etc/GMT+12' - USER='root' - _='/usr/bin/systemd-run' - http_proxy='http://10.0.0.15:3142/' + PWD=/ + SHELL=/bin/bash + SHELLOPTS=braceexpand:errexit:hashall:interactive-comments:posix + SHLVL=3 + SUDO_COMMAND='/usr/bin/timeout -k 24.1h 24h /usr/bin/ionice -c 3 /usr/bin/nice -n 11 /usr/bin/unshare --uts -- /usr/sbin/pbuilder --build --configfile /srv/reproducible-results/rbuild-debian/r-b-build.kB8C2ewA/pbuilderrc_uBVn --distribution bookworm --hookdir /etc/pbuilder/rebuild-hooks --debbuildopts -b --basetgz /var/cache/pbuilder/bookworm-reproducible-base.tgz --buildresult /srv/reproducible-results/rbuild-debian/r-b-build.kB8C2ewA/b2 --logfile b2/build.log --extrapackages usrmerge elasticsearch-curator_5.8.1-4.1.dsc' + SUDO_GID=113 + SUDO_UID=107 + SUDO_USER=jenkins + TERM=unknown + TZ=/usr/share/zoneinfo/Etc/GMT-14 + UID=0 + USER=root + _='I: set' + http_proxy=http://10.0.0.15:3142/ I: uname -a - Linux jtx1a 5.10.0-23-arm64 #1 SMP Debian 5.10.179-1 (2023-05-12) aarch64 GNU/Linux + Linux i-capture-the-hostname 5.10.0-23-armmp-lpae #1 SMP Debian 5.10.179-1 (2023-05-12) armv7l GNU/Linux I: ls -l /bin total 5072 - -rwxr-xr-x 1 root root 838488 Apr 23 09:24 bash - -rwxr-xr-x 3 root root 67144 Sep 18 2022 bunzip2 - -rwxr-xr-x 3 root root 67144 Sep 18 2022 bzcat - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzcmp -> bzdiff - -rwxr-xr-x 1 root root 2225 Sep 18 
2022 bzdiff - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzegrep -> bzgrep - -rwxr-xr-x 1 root root 4893 Nov 27 2021 bzexe - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzfgrep -> bzgrep - -rwxr-xr-x 1 root root 3775 Sep 18 2022 bzgrep - -rwxr-xr-x 3 root root 67144 Sep 18 2022 bzip2 - -rwxr-xr-x 1 root root 67112 Sep 18 2022 bzip2recover - lrwxrwxrwx 1 root root 6 Sep 18 2022 bzless -> bzmore - -rwxr-xr-x 1 root root 1297 Sep 18 2022 bzmore - -rwxr-xr-x 1 root root 67632 Sep 20 2022 cat - -rwxr-xr-x 1 root root 67676 Sep 20 2022 chgrp - -rwxr-xr-x 1 root root 67644 Sep 20 2022 chmod - -rwxr-xr-x 1 root root 67684 Sep 20 2022 chown - -rwxr-xr-x 1 root root 133532 Sep 20 2022 cp - -rwxr-xr-x 1 root root 132868 Jan 5 01:20 dash - -rwxr-xr-x 1 root root 133220 Sep 20 2022 date - -rwxr-xr-x 1 root root 67732 Sep 20 2022 dd - -rwxr-xr-x 1 root root 68104 Sep 20 2022 df - -rwxr-xr-x 1 root root 133632 Sep 20 2022 dir - -rwxr-xr-x 1 root root 59128 Mar 22 21:02 dmesg - lrwxrwxrwx 1 root root 8 Dec 19 01:33 dnsdomainname -> hostname - lrwxrwxrwx 1 root root 8 Dec 19 01:33 domainname -> hostname - -rwxr-xr-x 1 root root 67560 Sep 20 2022 echo - -rwxr-xr-x 1 root root 41 Jan 24 02:43 egrep - -rwxr-xr-x 1 root root 67548 Sep 20 2022 false - -rwxr-xr-x 1 root root 41 Jan 24 02:43 fgrep - -rwxr-xr-x 1 root root 55748 Mar 22 21:02 findmnt - -rwsr-xr-x 1 root root 26208 Mar 22 20:15 fusermount - -rwxr-xr-x 1 root root 128608 Jan 24 02:43 grep - -rwxr-xr-x 2 root root 2346 Apr 9 2022 gunzip - -rwxr-xr-x 1 root root 6447 Apr 9 2022 gzexe - -rwxr-xr-x 1 root root 64220 Apr 9 2022 gzip - -rwxr-xr-x 1 root root 67032 Dec 19 01:33 hostname - -rwxr-xr-x 1 root root 67720 Sep 20 2022 ln - -rwxr-xr-x 1 root root 35132 Mar 22 21:51 login - -rwxr-xr-x 1 root root 133632 Sep 20 2022 ls - -rwxr-xr-x 1 root root 136808 Mar 22 21:02 lsblk - -rwxr-xr-x 1 root root 67800 Sep 20 2022 mkdir - -rwxr-xr-x 1 root root 67764 Sep 20 2022 mknod - -rwxr-xr-x 1 root root 67596 Sep 20 2022 mktemp - -rwxr-xr-x 1 root root 38504 Mar 22 21:02 more - -rwsr-xr-x 1 root root 38496 Mar 22 21:02 mount - -rwxr-xr-x 1 root root 9824 Mar 22 21:02 mountpoint - -rwxr-xr-x 1 root root 133532 Sep 20 2022 mv - lrwxrwxrwx 1 root root 8 Dec 19 01:33 nisdomainname -> hostname - lrwxrwxrwx 1 root root 14 Apr 2 18:25 pidof -> /sbin/killall5 - -rwxr-xr-x 1 root root 67608 Sep 20 2022 pwd - lrwxrwxrwx 1 root root 4 Apr 23 09:24 rbash -> bash - -rwxr-xr-x 1 root root 67600 Sep 20 2022 readlink - -rwxr-xr-x 1 root root 67672 Sep 20 2022 rm - -rwxr-xr-x 1 root root 67600 Sep 20 2022 rmdir - -rwxr-xr-x 1 root root 67400 Nov 2 2022 run-parts - -rwxr-xr-x 1 root root 133372 Jan 5 07:55 sed - lrwxrwxrwx 1 root root 4 Jan 5 01:20 sh -> dash - -rwxr-xr-x 1 root root 67584 Sep 20 2022 sleep - -rwxr-xr-x 1 root root 67644 Sep 20 2022 stty - -rwsr-xr-x 1 root root 50800 Mar 22 21:02 su - -rwxr-xr-x 1 root root 67584 Sep 20 2022 sync - -rwxr-xr-x 1 root root 336764 Apr 6 02:25 tar - -rwxr-xr-x 1 root root 67144 Nov 2 2022 tempfile - -rwxr-xr-x 1 root root 133224 Sep 20 2022 touch - -rwxr-xr-x 1 root root 67548 Sep 20 2022 true - -rwxr-xr-x 1 root root 9768 Mar 22 20:15 ulockmgr_server - -rwsr-xr-x 1 root root 22108 Mar 22 21:02 umount - -rwxr-xr-x 1 root root 67572 Sep 20 2022 uname - -rwxr-xr-x 2 root root 2346 Apr 9 2022 uncompress - -rwxr-xr-x 1 root root 133632 Sep 20 2022 vdir - -rwxr-xr-x 1 root root 42608 Mar 22 21:02 wdctl - lrwxrwxrwx 1 root root 8 Dec 19 01:33 ypdomainname -> hostname - -rwxr-xr-x 1 root root 1984 Apr 9 2022 zcat - -rwxr-xr-x 1 root root 1678 Apr 9 2022 
zcmp - -rwxr-xr-x 1 root root 6460 Apr 9 2022 zdiff - -rwxr-xr-x 1 root root 29 Apr 9 2022 zegrep - -rwxr-xr-x 1 root root 29 Apr 9 2022 zfgrep - -rwxr-xr-x 1 root root 2081 Apr 9 2022 zforce - -rwxr-xr-x 1 root root 8103 Apr 9 2022 zgrep - -rwxr-xr-x 1 root root 2206 Apr 9 2022 zless - -rwxr-xr-x 1 root root 1842 Apr 9 2022 zmore - -rwxr-xr-x 1 root root 4577 Apr 9 2022 znew -I: user script /srv/workspace/pbuilder/22109/tmp/hooks/D02_print_environment finished + -rwxr-xr-x 1 root root 838488 Apr 24 11:24 bash + -rwxr-xr-x 3 root root 67144 Sep 19 2022 bunzip2 + -rwxr-xr-x 3 root root 67144 Sep 19 2022 bzcat + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzcmp -> bzdiff + -rwxr-xr-x 1 root root 2225 Sep 19 2022 bzdiff + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzegrep -> bzgrep + -rwxr-xr-x 1 root root 4893 Nov 28 2021 bzexe + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzfgrep -> bzgrep + -rwxr-xr-x 1 root root 3775 Sep 19 2022 bzgrep + -rwxr-xr-x 3 root root 67144 Sep 19 2022 bzip2 + -rwxr-xr-x 1 root root 67112 Sep 19 2022 bzip2recover + lrwxrwxrwx 1 root root 6 Sep 19 2022 bzless -> bzmore + -rwxr-xr-x 1 root root 1297 Sep 19 2022 bzmore + -rwxr-xr-x 1 root root 67632 Sep 21 2022 cat + -rwxr-xr-x 1 root root 67676 Sep 21 2022 chgrp + -rwxr-xr-x 1 root root 67644 Sep 21 2022 chmod + -rwxr-xr-x 1 root root 67684 Sep 21 2022 chown + -rwxr-xr-x 1 root root 133532 Sep 21 2022 cp + -rwxr-xr-x 1 root root 132868 Jan 6 03:20 dash + -rwxr-xr-x 1 root root 133220 Sep 21 2022 date + -rwxr-xr-x 1 root root 67732 Sep 21 2022 dd + -rwxr-xr-x 1 root root 68104 Sep 21 2022 df + -rwxr-xr-x 1 root root 133632 Sep 21 2022 dir + -rwxr-xr-x 1 root root 59128 Mar 23 23:02 dmesg + lrwxrwxrwx 1 root root 8 Dec 20 03:33 dnsdomainname -> hostname + lrwxrwxrwx 1 root root 8 Dec 20 03:33 domainname -> hostname + -rwxr-xr-x 1 root root 67560 Sep 21 2022 echo + -rwxr-xr-x 1 root root 41 Jan 25 04:43 egrep + -rwxr-xr-x 1 root root 67548 Sep 21 2022 false + -rwxr-xr-x 1 root root 41 Jan 25 04:43 fgrep + -rwxr-xr-x 1 root root 55748 Mar 23 23:02 findmnt + -rwsr-xr-x 1 root root 26208 Mar 23 22:15 fusermount + -rwxr-xr-x 1 root root 128608 Jan 25 04:43 grep + -rwxr-xr-x 2 root root 2346 Apr 10 2022 gunzip + -rwxr-xr-x 1 root root 6447 Apr 10 2022 gzexe + -rwxr-xr-x 1 root root 64220 Apr 10 2022 gzip + -rwxr-xr-x 1 root root 67032 Dec 20 03:33 hostname + -rwxr-xr-x 1 root root 67720 Sep 21 2022 ln + -rwxr-xr-x 1 root root 35132 Mar 23 23:51 login + -rwxr-xr-x 1 root root 133632 Sep 21 2022 ls + -rwxr-xr-x 1 root root 136808 Mar 23 23:02 lsblk + -rwxr-xr-x 1 root root 67800 Sep 21 2022 mkdir + -rwxr-xr-x 1 root root 67764 Sep 21 2022 mknod + -rwxr-xr-x 1 root root 67596 Sep 21 2022 mktemp + -rwxr-xr-x 1 root root 38504 Mar 23 23:02 more + -rwsr-xr-x 1 root root 38496 Mar 23 23:02 mount + -rwxr-xr-x 1 root root 9824 Mar 23 23:02 mountpoint + -rwxr-xr-x 1 root root 133532 Sep 21 2022 mv + lrwxrwxrwx 1 root root 8 Dec 20 03:33 nisdomainname -> hostname + lrwxrwxrwx 1 root root 14 Apr 3 20:25 pidof -> /sbin/killall5 + -rwxr-xr-x 1 root root 67608 Sep 21 2022 pwd + lrwxrwxrwx 1 root root 4 Apr 24 11:24 rbash -> bash + -rwxr-xr-x 1 root root 67600 Sep 21 2022 readlink + -rwxr-xr-x 1 root root 67672 Sep 21 2022 rm + -rwxr-xr-x 1 root root 67600 Sep 21 2022 rmdir + -rwxr-xr-x 1 root root 67400 Nov 3 2022 run-parts + -rwxr-xr-x 1 root root 133372 Jan 6 09:55 sed + lrwxrwxrwx 1 root root 9 May 16 09:09 sh -> /bin/bash + -rwxr-xr-x 1 root root 67584 Sep 21 2022 sleep + -rwxr-xr-x 1 root root 67644 Sep 21 2022 stty + -rwsr-xr-x 1 root root 50800 Mar 
23 23:02 su + -rwxr-xr-x 1 root root 67584 Sep 21 2022 sync + -rwxr-xr-x 1 root root 336764 Apr 7 04:25 tar + -rwxr-xr-x 1 root root 67144 Nov 3 2022 tempfile + -rwxr-xr-x 1 root root 133224 Sep 21 2022 touch + -rwxr-xr-x 1 root root 67548 Sep 21 2022 true + -rwxr-xr-x 1 root root 9768 Mar 23 22:15 ulockmgr_server + -rwsr-xr-x 1 root root 22108 Mar 23 23:02 umount + -rwxr-xr-x 1 root root 67572 Sep 21 2022 uname + -rwxr-xr-x 2 root root 2346 Apr 10 2022 uncompress + -rwxr-xr-x 1 root root 133632 Sep 21 2022 vdir + -rwxr-xr-x 1 root root 42608 Mar 23 23:02 wdctl + lrwxrwxrwx 1 root root 8 Dec 20 03:33 ypdomainname -> hostname + -rwxr-xr-x 1 root root 1984 Apr 10 2022 zcat + -rwxr-xr-x 1 root root 1678 Apr 10 2022 zcmp + -rwxr-xr-x 1 root root 6460 Apr 10 2022 zdiff + -rwxr-xr-x 1 root root 29 Apr 10 2022 zegrep + -rwxr-xr-x 1 root root 29 Apr 10 2022 zfgrep + -rwxr-xr-x 1 root root 2081 Apr 10 2022 zforce + -rwxr-xr-x 1 root root 8103 Apr 10 2022 zgrep + -rwxr-xr-x 1 root root 2206 Apr 10 2022 zless + -rwxr-xr-x 1 root root 1842 Apr 10 2022 zmore + -rwxr-xr-x 1 root root 4577 Apr 10 2022 znew +I: user script /srv/workspace/pbuilder/26870/tmp/hooks/D02_print_environment finished -> Attempting to satisfy build-dependencies -> Creating pbuilder-satisfydepends-dummy package Package: pbuilder-satisfydepends-dummy @@ -353,7 +385,7 @@ Get: 109 http://deb.debian.org/debian bookworm/main armhf python3-voluptuous all 0.12.2-1 [43.4 kB] Get: 110 http://deb.debian.org/debian bookworm/main armhf python3-yaml armhf 6.0-3+b2 [111 kB] Get: 111 http://deb.debian.org/debian bookworm/main armhf w3m armhf 0.5.3+git20230121-2 [1021 kB] -Fetched 62.7 MB in 6s (10.4 MB/s) +Fetched 62.7 MB in 3s (24.6 MB/s) debconf: delaying package configuration, since apt-utils is not installed Selecting previously unselected package fonts-lato. (Reading database ... (Reading database ... 5% (Reading database ... 10% (Reading database ... 15% (Reading database ... 20% (Reading database ... 25% (Reading database ... 30% (Reading database ... 35% (Reading database ... 40% (Reading database ... 45% (Reading database ... 50% (Reading database ... 55% (Reading database ... 60% (Reading database ... 65% (Reading database ... 70% (Reading database ... 75% (Reading database ... 80% (Reading database ... 85% (Reading database ... 90% (Reading database ... 95% (Reading database ... 100% (Reading database ... 19329 files and directories currently installed.) @@ -827,10 +859,17 @@ Reading package lists... Building dependency tree... Reading state information... +usrmerge is already the newest version (35). fakeroot is already the newest version (1.31-1.2). 0 upgraded, 0 newly installed, 0 to remove and 0 not upgraded. I: Building the package -I: Running cd /build/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games" HOME="/nonexistent/first-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes +I: user script /srv/workspace/pbuilder/26870/tmp/hooks/A99_set_merged_usr starting +Re-configuring usrmerge... +removed '/etc/unsupported-skip-usrmerge-conversion' +The system has been successfully converted. 
+I: user script /srv/workspace/pbuilder/26870/tmp/hooks/A99_set_merged_usr finished +hostname: Name or service not known +I: Running cd /build/elasticsearch-curator-5.8.1/ && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-buildpackage -us -uc -b && env PATH="/usr/sbin:/usr/bin:/sbin:/bin:/usr/games:/i/capture/the/path" HOME="/nonexistent/second-build" dpkg-genchanges -S > ../elasticsearch-curator_5.8.1-4.1_source.changes dpkg-buildpackage: info: source package elasticsearch-curator dpkg-buildpackage: info: source version 5.8.1-4.1 dpkg-buildpackage: info: source distribution unstable @@ -860,20 +899,20 @@ running build running build_py creating /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/logtools.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/_version.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/cli.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/utils.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/actions.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/logtools.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/repomgrcli.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/cli.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/curator_cli.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/exceptions.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/config_utils.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/indexlist.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator +copying curator/__init__.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/singletons.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/snapshotlist.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/utils.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator copying curator/__main__.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/__init__.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/indexlist.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/repomgrcli.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/exceptions.py -> /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator -copying curator/config_utils.py -> 
/build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator running egg_info creating elasticsearch_curator.egg-info writing elasticsearch_curator.egg-info/PKG-INFO @@ -1083,7 +1122,7 @@ writing manifest file 'elasticsearch_curator.egg-info/SOURCES.txt' running build_ext Starting new HTTP connection (1): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.021s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1137,10 +1176,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (2): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.013s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1194,10 +1233,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (3): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1251,10 +1290,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (4): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.001s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1308,7 +1347,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (5): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1365,7 +1404,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (6): localhost:9200 
GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1422,7 +1461,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (7): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -1479,10 +1518,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (8): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1536,7 +1575,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (9): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -1593,7 +1632,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (10): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.003s] @@ -1650,10 +1689,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (11): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1707,10 +1746,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (12): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.013s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ 
-1764,10 +1803,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (13): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.016s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1821,10 +1860,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (14): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1878,10 +1917,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (15): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -1935,7 +1974,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (16): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.004s] @@ -1992,10 +2031,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (17): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2049,10 +2088,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 
111] Connection refused > None Starting new HTTP connection (18): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2106,10 +2145,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (19): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2163,10 +2202,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (20): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.012s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2220,10 +2259,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (21): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2277,10 +2316,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (22): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.021s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2334,10 +2373,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (23): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET 
http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2391,10 +2430,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (24): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2448,10 +2487,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (25): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2505,10 +2544,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (26): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2562,10 +2601,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (27): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2619,10 +2658,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (28): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in 
from cx_Freeze import setup, Executable @@ -2676,10 +2715,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (29): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2733,10 +2772,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (30): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2790,10 +2829,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (31): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.016s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2847,10 +2886,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (32): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.023s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2904,10 +2943,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (33): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -2961,10 +3000,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 
186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (34): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3018,10 +3057,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (35): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3075,10 +3114,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (36): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3132,10 +3171,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (37): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3189,10 +3228,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (38): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3246,10 +3285,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection 
refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (39): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3303,10 +3342,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (40): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.013s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3360,10 +3399,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (41): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.028s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3417,10 +3456,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (42): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3474,7 +3513,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (43): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -3531,10 +3570,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (44): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File 
"/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3588,10 +3627,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (45): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3645,10 +3684,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (46): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3702,10 +3741,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (47): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3759,10 +3798,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (48): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3816,10 +3855,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (49): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3873,10 +3912,10 @@ ^^^^^^^^^^^^^^^^ File 
"/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (50): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.016s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3930,10 +3969,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (51): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.019s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -3987,10 +4026,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (52): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4044,10 +4083,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (53): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4101,10 +4140,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (54): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.007s] +GET http://localhost:9200/ [status:N/A request:0.006s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4158,10 +4197,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : 
Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (55): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4215,10 +4254,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (56): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4272,10 +4311,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (57): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4329,10 +4368,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (58): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.004s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4386,10 +4425,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (59): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4443,10 +4482,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: 
[Errno 111] Connection refused > None Starting new HTTP connection (60): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4500,10 +4539,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (61): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.011s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4557,10 +4596,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (62): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4614,10 +4653,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (63): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4671,10 +4710,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (64): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4728,10 +4767,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (65): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET 
http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4785,10 +4824,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (66): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4842,10 +4881,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (67): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4899,10 +4938,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (68): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -4956,10 +4995,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (69): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.016s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5013,10 +5052,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (70): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.013s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in 
from cx_Freeze import setup, Executable @@ -5070,10 +5109,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (71): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.005s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5127,7 +5166,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (72): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5184,7 +5223,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (73): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5241,7 +5280,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (74): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5298,7 +5337,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (75): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5355,10 +5394,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (76): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.007s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5412,10 +5451,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new 
connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (77): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5469,10 +5508,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (78): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5526,10 +5565,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (79): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5583,10 +5622,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (80): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.029s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5640,7 +5679,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (81): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5697,10 +5736,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (82): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.003s] +GET http://localhost:9200/ [status:N/A request:0.002s] Traceback (most recent 
call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5754,10 +5793,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (83): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5811,7 +5850,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (84): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -5868,10 +5907,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (85): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5925,10 +5964,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (86): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -5982,10 +6021,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (87): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6039,10 +6078,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: 
[Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (88): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.008s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6096,10 +6135,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (89): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.009s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6153,7 +6192,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (90): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6210,7 +6249,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (91): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6267,7 +6306,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (92): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6324,7 +6363,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (93): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6381,7 +6420,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (94): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ 
-6438,10 +6477,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (95): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.003s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6495,10 +6534,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (96): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6552,10 +6591,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (97): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.004s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6609,10 +6648,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (98): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.005s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6666,10 +6705,10 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (99): localhost:9200 -GET http://localhost:9200/ [status:N/A request:0.002s] +GET http://localhost:9200/ [status:N/A request:0.012s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -6723,7 +6762,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( 
-urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Starting new HTTP connection (100): localhost:9200 GET http://localhost:9200/ [status:N/A request:0.002s] @@ -6780,7 +6819,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused > None Getting all indices Detected Elasticsearch version 5.0.0 @@ -8134,7 +8173,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -8170,7 +8209,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 0}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 0, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 0, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 0}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. Response: True Action "reindex" finished executing (may or may not have been successful) Result: True @@ -9486,9 +9525,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091926) Index index-2016.03.04 is actionable and remains in the list. 
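In the reindex TASK_DATA block above, running_time_in_nanos 1637039537721 matches the logged running time of roughly 1637.04 seconds, and start_time_in_millis 1489695981997 plus that running time lands on 2017-03-16T20:53:39 UTC; the two logs print that same instant 26 hours apart (08:53:39Z versus 10:53:39Z the next day), apparently because each run formats it with its own local clock while still labelling it "Z". A short sketch of that arithmetic, illustrating the logged numbers rather than Curator's actual code:

    from datetime import datetime, timedelta, timezone

    # Illustration of the numbers in the TASK_DATA lines (not Curator's code):
    # the running time is running_time_in_nanos in seconds, and the "completed at"
    # stamp appears to be start_time_in_millis plus that running time.
    running_s = 1637039537721 / 1_000_000_000            # ~1637.04 seconds
    start = datetime.fromtimestamp(1489695981997 / 1000, tz=timezone.utc)
    completed = start + timedelta(seconds=running_s)
    print(running_s, completed.isoformat(timespec="seconds"))
    # 2017-03-16T20:53:39+00:00 in UTC; the two builds render this same instant
    # as 2017-03-16T08:53:39Z and 2017-03-17T10:53:39Z, 26 hours apart, which is
    # exactly the kind of clock-dependent output this diff captures.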
-Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091926) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9541,9 +9580,9 @@ Filtering indices by age Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091926) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091926) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9670,9 +9709,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091927) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091927) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9693,9 +9732,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "older", point of reference, (1684091927) Index index-2016.03.04 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "older", point of reference, (1684091927) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9739,9 +9778,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is not actionable, removing from list. -Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091927) Index index-2016.03.04 is not actionable, removing from list. 
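In the age-filter checks around this point, the index ages stay fixed between builds (1456963200 and 1457049600 are 2016-03-03T00:00:00Z and 2016-03-04T00:00:00Z, matching the index names), while the "point of reference" epoch differs (1684091187 versus 1684091926/1684091927) because it is evidently derived from the clock at the time the test runs. A minimal sketch of the comparison these lines describe — an illustration only, not Curator's implementation, and the helper name is invented:

    import time
    from datetime import datetime, timezone

    def remains_actionable(index_epoch: int, direction: str, point_of_reference: int) -> bool:
        # Hypothetical helper mirroring the log wording: "older" keeps indices whose
        # timestamp lies before the point of reference, "younger" keeps those after it.
        if direction == "older":
            return index_epoch < point_of_reference
        return index_epoch > point_of_reference

    reference = int(time.time())   # taken from the current clock, so it differs per run
    for name, epoch in [("index-2016.03.03", 1456963200), ("index-2016.03.04", 1457049600)]:
        day = datetime.fromtimestamp(epoch, tz=timezone.utc).date()
        print(name, day, remains_actionable(epoch, "older", reference))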
-Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091187) +Removed from actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091927) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -9762,9 +9801,9 @@ Generating working list of indices Generating working list of indices Index index-2016.03.03 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.03" age (1456963200), direction: "younger", point of reference, (1684091927) Index index-2016.03.04 is actionable and remains in the list. -Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091187) +Remains in actionable list: Index "index-2016.03.04" age (1457049600), direction: "younger", point of reference, (1684091927) .Getting all indices Detected Elasticsearch version 5.0.0 All indices: ['index-2016.03.03', 'index-2016.03.04'] @@ -11083,7 +11122,7 @@ All filters: [{'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , 
msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Parsed filter args: {'filtertype': 'age', 'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} Filter args: {'source': 'name', 'direction': 'older', 'timestring': '%Y.%m.%d', 'unit': 'seconds', 'unit_count': 0, 'epoch': 1456963201} @@ -11116,7 +11155,7 @@ All filters: [{'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': 
Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Parsed filter args: {'filtertype': 'allocated', 'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} Filter args: {'key': 'tag', 'value': 'foo', 'allocation_type': 'include'} @@ -11148,7 +11187,7 @@ All filters: [{'filtertype': 'closed'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'closed'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, 
msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'closed'} Parsed filter args: {'filtertype': 'closed'} Filtering closed indices @@ -11177,7 +11216,7 @@ All filters: [{'filtertype': 'forcemerged', 'max_num_segments': 2}] Top of the loop: ['index_name'] Un-parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , 
msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 2} Parsed filter args: {'filtertype': 'forcemerged', 'max_num_segments': 2} Filter args: {'max_num_segments': 2} @@ -11212,7 +11251,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 
'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11240,7 +11279,7 @@ All filters: [{'filtertype': 'ilm', 'exclude': True}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'ilm', 'exclude': True} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': 
Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'ilm', 'exclude': True} Parsed filter args: {'filtertype': 'ilm', 'exclude': True} Filter args: {'exclude': True} @@ -11271,7 +11310,7 @@ All filters: [{'filtertype': 12345.6789}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 
'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11291,7 +11330,7 @@ All filters: [{'filtertype': 'kibana'}] Top of the loop: ['.kibana', '.kibana-5', '.kibana-6', 'dummy'] Un-parsed filter args: {'filtertype': 'kibana'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': 
Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'kibana'} Parsed filter args: {'filtertype': 'kibana'} Filtering kibana indices @@ -11339,7 +11378,7 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, 
msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Getting all indices @@ -11359,7 +11398,7 @@ All filters: [{'filtertype': 'none'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': 
Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} "None" filter selected. No filtering will be done. @@ -11383,7 +11422,7 @@ All filters: [{'filtertype': 'opened'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'opened'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 
'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'opened'} Parsed filter args: {'filtertype': 'opened'} Filtering open indices @@ -11417,7 +11456,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, 
msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'a', 'exclude': False} Filter args: {'kind': 'prefix', 'value': 'a', 'exclude': False} @@ -11454,7 +11493,7 @@ All filters: [{'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'}] Top of the loop: ['a-2016.03.03', 'b-2016.03.04', 'c-2016.03.05', 'd-2016.03.06'] Un-parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, 
msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Parsed filter args: {'filtertype': 'space', 'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} Filter args: {'disk_space': 2.1, 'source': 'name', 'use_age': True, 'timestring': '%Y.%m.%d'} @@ -11501,7 +11540,7 @@ All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['index-2016.03.03', 'index-2016.03.04'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, 
msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for dictionary value @ data['filtertype'] .Getting all indices @@ -11550,30 +11589,30 @@ All filters: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 
'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Parsed filter args: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1} Filter args: {'direction': 'older', 'unit': 'days', 'unit_count': 1} Pre-instance: ['snap_name', 'snapshot-2015.03.01'] Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091936 Snapshot snap_name is actionable and remains in the list. -Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684091188) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684091936) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684091188) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684091936) Post-instance: ['snap_name', 'snapshot-2015.03.01'] .Repository repo_name exists. 
All filters: [{'filtertype': 'sir_not_appearing_in_this_film'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'sir_not_appearing_in_this_film'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'sir_not_appearing_in_this_film'} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. All filters: [{'filtertype': 12345.6789}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 12345.6789} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 12345.6789} Schema error: filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state'] for 
dictionary value @ data['filtertype'] .Repository repo_name exists. @@ -11582,14 +11621,14 @@ All filters: [{'no_filtertype': 'fail'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'no_filtertype': 'fail'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'no_filtertype': 'fail'} Schema error: extra keys not allowed @ data['no_filtertype'] .Repository repo_name exists. 
All filters: [{'filtertype': 'none'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'none'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'none'} Parsed filter args: {'filtertype': 'none'} Filter args: {} @@ -11600,7 +11639,7 @@ All filters: [{'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'}] Top of the loop: ['snap_name', 'snapshot-2015.03.01'] Un-parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 
'sna'} -Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} +Schema: {'aliases': Any(, , msg=None), 'allocation_type': Any(, msg=None), 'count': Coerce(int, msg=None), 'date_from': Any(None, , msg=None), 'date_from_format': Any(None, , msg=None), 'date_to': Any(None, , msg=None), 'date_to_format': Any(None, , msg=None), 'direction': Any(, msg=None), 'disk_space': , 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(None, , , , msg=None), 'field': Any(None, , msg=None), 'intersect': Any(None, , , , msg=None), 'key': Any(, msg=None), 'kind': Any(, msg=None), 'max_num_segments': Coerce(int, msg=None), 'number_of_shards': Coerce(int, msg=None), 'pattern': Any(, msg=None), 'period_type': Any(, msg=None), 'reverse': Any(None, , , , msg=None), 'range_from': Coerce(int, msg=None), 'range_to': Coerce(int, msg=None), 'shard_filter_behavior': Any(, msg=None), 'source': Any(, msg=None), 'state': Any(, msg=None), 'stats_result': Any(None, , msg=None), 'timestring': Any(None, , msg=None), 'threshold_behavior': Any(, msg=None), 'unit': Any(, msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'use_age': , 'value': Any(, , , , msg=None), 'week_starts_on': Any(None, , msg=None), 'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']")} "filter" config: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Parsed filter args: {'filtertype': 'pattern', 'kind': 'prefix', 'value': 'sna'} Filter args: {'kind': 'prefix', 'value': 'sna'} @@ -11618,11 +11657,11 @@ ..Repository repo_name not found... ..Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 Snapshot snap_name is actionable and remains in the list. 
-Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684091188) +Remains in actionable list: Snapshot "snap_name" age (1422748800), direction: "older", point of reference, (1684091937) Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684091188) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1684091937) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1425168001 @@ -11632,11 +11671,11 @@ Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "older", point of reference, (1425168001) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 Snapshot snap_name is not actionable, removing from list. -Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1684091188) +Removed from actionable list: Snapshot "snap_name" age (1422748800), direction: "younger", point of reference, (1684091937) Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1684091188) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1684091937) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748801 @@ -11646,14 +11685,14 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168002), direction: "younger", point of reference, (1422748801) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is actionable and remains in the list. -Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1684091188) +Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1684091937) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1456963200 @@ -11663,11 +11702,11 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "older", point of reference, (1456963200) .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 regex = \d{4}\.\d{2}\.\d{2} Removing snapshot {0} for having no age Snapshot snapshot-2015.03.01 is not actionable, removing from list. -Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1684091188) +Removed from actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1684091937) .Repository repo_name exists. Starting filter_by_age Point of Reference: 1422748800 @@ -11677,13 +11716,13 @@ Remains in actionable list: Snapshot "snapshot-2015.03.01" age (1425168000), direction: "younger", point of reference, (1422748800) .Repository repo_name exists. 
Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 .Repository repo_name exists. Starting filter_by_age -Point of Reference: 1684091188 +Point of Reference: 1684091937 .Repository repo_name exists. Filtering snapshots by count .Repository repo_name exists. @@ -11932,7 +11971,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -SSGET https://127.0.0.1:9200/ [status:N/A request:0.009s] +SSGET https://127.0.0.1:9200/ [status:N/A request:0.024s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -11963,7 +12002,7 @@ warnings.warn( Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.008s] +GET https://127.0.0.1:9200/ [status:N/A request:0.013s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -11993,7 +12032,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12006,7 +12045,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12019,9 +12058,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: 
[Errno 111] Connection refused'))) Curator cannot proceed. Exiting. .kwargs = {'use_ssl': True, 'client_cert': 'myclientcert.pem', 'hosts': ['127.0.0.1'], 'master_only': False, 'ssl_no_validate': False, 'certificate': False, 'client_key': False} Attempting to verify SSL certificate. @@ -12029,7 +12068,7 @@ Not using "requests_aws4auth" python module to connect. Instantiating client object Testing client connectivity -GET https://127.0.0.1:9200/ [status:N/A request:0.006s] +GET https://127.0.0.1:9200/ [status:N/A request:0.012s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12059,7 +12098,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.008s] +GET https://127.0.0.1:9200/ [status:N/A request:0.017s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12089,7 +12128,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12102,7 +12141,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12115,9 +12154,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
..kwargs = {'url_prefix': '', 'master_only': True, 'hosts': ['127.0.0.1', '127.0.0.1'], 'use_ssl': False, 'ssl_no_validate': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12129,7 +12168,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.007s] +GET https://127.0.0.1:9200/ [status:N/A request:0.012s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12159,7 +12198,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12172,7 +12211,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12185,9 +12224,9 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
.kwargs = {'url_prefix': '', 'use_ssl': True, 'ssl_no_validate': True, 'hosts': ['127.0.0.1'], 'master_only': False, 'certificate': False, 'client_cert': False, 'client_key': False} Checking for AWS settings @@ -12195,7 +12234,7 @@ Instantiating client object Testing client connectivity Starting new HTTPS connection (1): 127.0.0.1:9200 -GET https://127.0.0.1:9200/ [status:N/A request:0.007s] +GET https://127.0.0.1:9200/ [status:N/A request:0.014s] Traceback (most recent call last): File "/build/elasticsearch-curator-5.8.1/setup.py", line 42, in from cx_Freeze import setup, Executable @@ -12225,7 +12264,7 @@ ^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/connection.py", line 186, in _new_conn raise NewConnectionError( -urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused +urllib3.exceptions.NewConnectionError: : Failed to establish a new connection: [Errno 111] Connection refused During handling of the above exception, another exception occurred: @@ -12238,7 +12277,7 @@ ^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/urllib3/util/retry.py", line 592, in increment raise MaxRetryError(_pool, url, error or ResponseError(cause)) -urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +urllib3.exceptions.MaxRetryError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) During handling of the above exception, another exception occurred: @@ -12251,11 +12290,11 @@ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ File "/usr/lib/python3/dist-packages/requests/adapters.py", line 559, in send raise ProxyError(e, request=request) -requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +requests.exceptions.ProxyError: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) > None -HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) +HTTP N/A error: HTTPSConnectionPool(host='127.0.0.1', port=9200): Max retries exceeded with url: / (Caused by ProxyError('Cannot connect to proxy.', NewConnectionError(': Failed to establish a new connection: [Errno 111] Connection refused'))) Curator cannot proceed. Exiting. 
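
[Note] The test blocks above differ between the two builds only in request timing (0.006-0.008s in the first log vs 0.012-0.017s in the second): with network access disabled in the build chroot, nothing listens on 127.0.0.1:9200, so every connectivity check ends in the same [Errno 111] Connection refused and "Curator cannot proceed. Exiting." The sketch below is a minimal, hypothetical illustration (not taken from the curator test suite; the URL and timeout are only examples) of how that failure surfaces through requests/urllib3. In this environment the refused connection is reported as a ProxyError because a proxy is configured, and ProxyError is a subclass of ConnectionError, so catching the latter covers both.

import requests

try:
    # Nothing listens on 127.0.0.1:9200 inside the chroot, so the attempt
    # fails with [Errno 111] Connection refused; only how long the attempt
    # takes varies between runs, which is all the +/- lines above show.
    requests.get("https://127.0.0.1:9200/", timeout=1)
except requests.exceptions.ConnectionError as err:
    # Comparable to the "HTTP N/A error: ..." lines in the log above.
    print("HTTP N/A error:", err)
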
-.Random datemath string for extraction: +.Random datemath string for extraction: Response index name for extraction: failure .Random datemath string for extraction: Response index name for extraction: curator_get_datemath_function_not_random_at_all-hasthemath @@ -12330,7 +12369,7 @@ TASK_DATA: {'completed': True, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] Running time: 1637.0395377210002 seconds -Task "UNIT TEST" completed at 2017-03-16T08:53:39Z. +Task "UNIT TEST" completed at 2017-03-17T10:53:39Z. .It's a REINDEX TASK TASK_DATA: {'completed': False, 'task': {'node': 'I0ekFjMhSPCQz7FUs1zJOg', 'status': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'requests_per_second': -1.0, 'version_conflicts': 0, 'total': 3646581}, 'description': 'UNIT TEST', 'running_time_in_nanos': 1637039537721, 'cancellable': True, 'action': 'indices:data/write/reindex', 'type': 'transport', 'id': 54510686, 'start_time_in_millis': 1489695981997}, 'response': {'retries': {'bulk': 0, 'search': 0}, 'updated': 0, 'batches': 3647, 'throttled_until_millis': 0, 'throttled_millis': 0, 'noops': 0, 'created': 3646581, 'deleted': 0, 'took': 1636917, 'requests_per_second': -1.0, 'timed_out': False, 'failures': [], 'version_conflicts': 0, 'total': 3646581}} TASK_DATA keys: ['completed', 'task', 'response'] @@ -12348,93 +12387,93 @@ Response: False Unable to complete action "replicas" within max_wait (1) seconds. 
Result: False -......Schema: .f at 0xf5b49938> +......Schema: .f at 0xb4d2a988> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'exclude': False, 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0xf5b75758> +Filter #0: {'filtertype': 'age', 'direction': 
'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 'stats_result': 'min_value', 'epoch': None, 'timestring': None} +.Schema: .f at 0xb4d567a8> "filters" config: [{'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'}] -AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +AGE FILTER = [{'direction': Any('older', 'younger', msg=None)}, {'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None)}, {'unit_count': Coerce(int, msg=None)}, {'unit_count_pattern': Any(, msg=None)}, {'epoch': Any(Coerce(int, msg=None), None, msg=None)}, {'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)}, {'source': Any('name', 'creation_date', 'field_stats', msg=None)}, {'stats_result': Any('min_value', 'max_value', msg=None)}, {'field': Any(, msg=None)}, {'timestring': Any(None, , msg=None)}] +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'direction': Any('older', 'younger', msg=None), 'unit': Any('seconds', 'minutes', 'hours', 'days', 'weeks', 'months', 'years', msg=None), 'unit_count': Coerce(int, msg=None), 'unit_count_pattern': Any(, msg=None), 'epoch': Any(Coerce(int, msg=None), None, msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': '1', 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'epoch': None, 'exclude': False, 
'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0xf5b18d98> +Filter #0: {'filtertype': 'age', 'direction': 'older', 'unit': 'days', 'unit_count': 1, 'source': 'field_stats', 'field': '@timestamp', 'exclude': False, 'stats_result': 'min_value', 'epoch': None, 'timestring': None} +.Schema: .f at 0xb4d08de8> "filters" config: [{'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'aliases': Any(, , msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} Filter #0: {'filtertype': 'alias', 'aliases': ['alias1', 'alias2'], 'exclude': False} -.Schema: .f at 0xf5b18de8> +.Schema: .f at 0xb4d08e38> "filters" config: [{'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'key': Any(, msg=None), 'value': Any(, msg=None), 'allocation_type': All(Any(, msg=None), Any('require', 'include', 'exclude', msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} Filter #0: {'filtertype': 'allocated', 'key': 'foo', 'value': 'bar', 'allocation_type': 'require', 'exclude': False} -.Schema: .f at 0xf5b18d98> +.Schema: .f at 0xb4d08de8> "filters" config: [{'filtertype': 'closed', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'closed', 'exclude': False} Filter #0: {'filtertype': 'closed', 'exclude': False} -.Schema: .f at 0xf5b18de8> +.Schema: .f at 0xb4d08e38> "filters" config: [{'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'count': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'pattern': Any(, msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False} -Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'use_age': False, 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0xf5b18d98> +Filter #0: {'filtertype': 'count', 'count': 1, 'reverse': True, 'exclude': False, 'use_age': False, 'stats_result': 'min_value', 'timestring': None} +.Schema: .f at 0xb4d08de8> "filters" config: [{'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'max_num_segments': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} Filter #0: {'filtertype': 'forcemerged', 'max_num_segments': 1, 'exclude': False} -.Schema: .f at 0xf5b18de8> +.Schema: .f at 0xb4d08e38> "filters" config: [{'filtertype': 'kibana', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'kibana', 'exclude': False} Filter #0: {'filtertype': 'kibana', 'exclude': False} -.Schema: .f at 0xf5b18d98> +.Schema: .f at 0xb4d08de8> "filters" config: [{'filtertype': 'opened', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'opened', 'exclude': False} Filter #0: {'filtertype': 'opened', 'exclude': False} -.Schema: .f at 0xf5b18de8> +.Schema: .f at 0xb4d08e38> "filters" config: [{'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 
'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'number_of_shards': All(Coerce(int, msg=None), Range(min=1, max=None, min_included=True, max_included=True, msg=None), msg=None), 'shard_filter_behavior': Any('greater_than', 'less_than', 'greater_than_or_equal', 'less_than_or_equal', 'equal', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} Filter #0: {'filtertype': 'shards', 'number_of_shards': 5, 'shard_filter_behavior': 'greater_than', 'exclude': False} -.Schema: .f at 0xf5b298e8> +.Schema: .f at 0xb4d0a938> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'creation_date'} -Filter #0: {'filtertype': 
'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'reverse': True, 'threshold_behavior': 'greater_than', 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0xf5ca6e88> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'creation_date', 'stats_result': 'min_value', 'timestring': None, 'threshold_behavior': 'greater_than', 'reverse': True} +.Schema: .f at 0xb4e85cf8> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'reverse': True, 'threshold_behavior': 'greater_than', 'timestring': None, 'stats_result': 'min_value'} -.Schema: .f at 0xf5b18708> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'field_stats', 'field': '@timestamp', 'stats_result': 'min_value', 'timestring': None, 'threshold_behavior': 'greater_than', 'reverse': True} +.Schema: .f at 0xb4d08758> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 
'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'field': Any(, msg=None), 'timestring': Any(None, , msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'} Schema error: required key not provided @ data['field'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'field_stats'}: Bad Value: "(could not determine)", required key not provided @ data['field']. Check configuration file. 
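
[Note] The +/- churn in this schema-validation section comes from run-to-run artifacts rather than from code differences: CPython reprs embed a heap address (".f at 0xf5b49938>" in the first build vs ".f at 0xb4d2a988>" in the second; the surrounding "<function ...>" brackets were lost in extraction), and printed dict key order follows insertion order, which can change between runs when keys are gathered from an unordered container under string-hash randomization. The sketch below is hypothetical (the names f, optional_keys and filter_config are illustrative, not curator's) and shows both effects; one plausible source of the reordered "Filter #0: {...}" lines is exactly this kind of set-driven key insertion.

def f():
    pass

# A function's repr embeds its heap address, so it differs on every
# interpreter run (cf. 0xf5b49938 vs 0xb4d2a988 above).
print(repr(f))

# Dict reprs preserve insertion order; if optional keys are collected from
# a set, their order can vary between runs because str hashing is
# randomized (PYTHONHASHSEED), reordering the printed filter dict.
optional_keys = {"epoch", "exclude", "timestring", "stats_result"}
filter_config = {"filtertype": "age", "unit_count": 1}
for key in optional_keys:
    filter_config.setdefault(key, None)
print(filter_config)

Such log-only differences do not by themselves change the files shipped in the built package.
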
-.Schema: .f at 0xf5b187f8> +.Schema: .f at 0xb4d08848> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf5b18708> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} +.Schema: .f at 0xb4d08758> "filters" config: [{'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 
'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'} Schema error: required key not provided @ data['timestring'] Schema error: Configuration: filter: Location: None, filter #0: {'filtertype': 'space', 'disk_space': 1, 'use_age': True, 'exclude': False, 'source': 'name'}: Bad Value: "(could not determine)", required key not provided @ data['timestring']. Check configuration file. -.Schema: .f at 0xf5b187f8> +.Schema: .f at 0xb4d08848> "filters" config: [{'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'disk_space': Any(Coerce(float, msg=None), msg=None), 'reverse': Any(, All(Any(, msg=None), , msg=None), msg=None), 'use_age': Any(, All(Any(, msg=None), , msg=None), msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None), 'threshold_behavior': Any('greater_than', 'less_than', msg=None), 'source': Any('name', 'creation_date', 'field_stats', msg=None), 'stats_result': Any('min_value', 'max_value', msg=None), 'timestring': Any(, msg=None)} "filter" config: {'filtertype': 'space', 'disk_space': '1.0', 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d'} -Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 'source': 'name', 'timestring': '%Y.%m.%d', 'reverse': True, 'threshold_behavior': 'greater_than', 'stats_result': 'min_value'} -.Schema: .f at 0xf5b18708> +Filter #0: {'filtertype': 'space', 'disk_space': 1.0, 'use_age': True, 'exclude': False, 
'source': 'name', 'timestring': '%Y.%m.%d', 'stats_result': 'min_value', 'threshold_behavior': 'greater_than', 'reverse': True} +.Schema: .f at 0xb4d08758> "filters" config: [{'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False}] -Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} +Schema: {'filtertype': Any(In(['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']), msg="filtertype must be one of ['age', 'alias', 'allocated', 'closed', 'count', 'empty', 'forcemerged', 'ilm', 'kibana', 'none', 'opened', 'pattern', 'period', 'shards', 'space', 'state']"), 'state': Any('SUCCESS', 'PARTIAL', 'FAILED', 'IN_PROGRESS', msg=None), 'exclude': Any(, All(Any(, msg=None), , msg=None), msg=None)} "filter" config: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} Filter #0: {'filtertype': 'state', 'state': 'SUCCESS', 'exclude': False} .. @@ -12470,7 +12509,7 @@ /build/elasticsearch-curator-5.8.1/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="? 'to' if job is 'add' else 'from', ---------------------------------------------------------------------- -Ran 529 tests in 17.938s +Ran 529 tests in 42.921s OK (SKIP=3) create-stamp debian/debhelper-build-stamp @@ -12593,96 +12632,96 @@ creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11 creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filter_elements.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying 
/build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/settings.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/filtertypes.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/client_defaults.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/defaults/option_defaults.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/logtools.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/_version.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> 
/build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/actions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/curator_cli.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> 
/build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/close.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/delete.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/open_indices.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/rollover.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons +copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/object_class.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons -copying 
/build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/show.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
 copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/alias.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/replicas.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/allocation.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
 copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/restore.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/snapshot.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
 copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/shrink.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
-creating /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/options.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/config_file.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/actions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/schemacheck.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/validators/filters.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/singletons.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/snapshotlist.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__main__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/indexlist.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/repomgrcli.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/exceptions.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
-copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/config_utils.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/freeze.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/forcemerge.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/unfreeze.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+copying /build/elasticsearch-curator-5.8.1/.pybuild/cpython3_3.11_elasticsearch-curator/build/curator/cli_singletons/__init__.py -> /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
+/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filter_elements.py to filter_elements.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/settings.py to settings.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/filtertypes.py to filtertypes.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/client_defaults.py to client_defaults.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/defaults/option_defaults.py to option_defaults.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/logtools.py to logtools.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/_version.py to _version.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli.py to cli.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/actions.py to actions.cpython-311.pyc
-/usr/lib/python3.11/dist-packages/curator/actions.py:149: SyntaxWarning: "is" with a literal. Did you mean "=="?
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/curator_cli.py to curator_cli.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/utils.py to utils.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/close.py to close.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/delete.py to delete.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/open_indices.py to open_indices.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/rollover.py to rollover.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/object_class.py to object_class.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/utils.py to utils.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/show.py to show.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/alias.py to alias.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/replicas.py to replicas.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/allocation.py to allocation.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/restore.py to restore.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/snapshot.py to snapshot.cpython-311.pyc
 byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/shrink.py to shrink.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/options.py to options.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/config_file.py to config_file.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/actions.py to actions.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/schemacheck.py to schemacheck.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/validators/filters.py to filters.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/singletons.py to singletons.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/snapshotlist.py to snapshotlist.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/utils.py to utils.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__main__.py to __main__.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/__init__.py to __init__.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/indexlist.py to indexlist.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/repomgrcli.py to repomgrcli.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/exceptions.py to exceptions.cpython-311.pyc
-byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/config_utils.py to config_utils.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/freeze.py to freeze.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/forcemerge.py to forcemerge.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/unfreeze.py to unfreeze.cpython-311.pyc
+byte-compiling /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/curator/cli_singletons/__init__.py to __init__.cpython-311.pyc
 running install_egg_info
 Copying elasticsearch_curator.egg-info to /build/elasticsearch-curator-5.8.1/debian/python3-elasticsearch-curator/usr/lib/python3.11/dist-packages/elasticsearch_curator-5.8.1.egg-info
 Skipping SOURCES.txt
@@ -12711,8 +12750,8 @@
 dpkg-gencontrol: warning: package python-elasticsearch-curator-doc: substitution variable ${sphinxdoc:Built-Using} unused, but is defined
 dh_md5sums -O--buildsystem=pybuild
 dh_builddeb -O--buildsystem=pybuild
-dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
 dpkg-deb: building package 'elasticsearch-curator' in '../elasticsearch-curator_5.8.1-4.1_all.deb'.
+dpkg-deb: building package 'python-elasticsearch-curator-doc' in '../python-elasticsearch-curator-doc_5.8.1-4.1_all.deb'.
 dpkg-deb: building package 'python3-elasticsearch-curator' in '../python3-elasticsearch-curator_5.8.1-4.1_all.deb'.
 dpkg-genbuildinfo --build=binary -O../elasticsearch-curator_5.8.1-4.1_armhf.buildinfo
 dpkg-genchanges --build=binary -O../elasticsearch-curator_5.8.1-4.1_armhf.changes
@@ -12721,12 +12760,14 @@
 dpkg-buildpackage: info: binary-only upload (no source included)
 dpkg-genchanges: info: not including original source code in upload
 I: copying local configuration
+I: user script /srv/workspace/pbuilder/26870/tmp/hooks/B01_cleanup starting
+I: user script /srv/workspace/pbuilder/26870/tmp/hooks/B01_cleanup finished
 I: unmounting dev/ptmx filesystem
 I: unmounting dev/pts filesystem
 I: unmounting dev/shm filesystem
 I: unmounting proc filesystem
 I: unmounting sys filesystem
 I: cleaning the build env
-I: removing directory /srv/workspace/pbuilder/22109 and its subdirectories
-I: Current time: Mon May 15 07:06:55 -12 2023
-I: pbuilder-time-stamp: 1684177615
+I: removing directory /srv/workspace/pbuilder/26870 and its subdirectories
+I: Current time: Tue May 16 09:21:16 +14 2023
+I: pbuilder-time-stamp: 1684178476