+ echo_summary 'Installing package prerequisites' + [[ -t 3 ]] + echo -e Installing package prerequisites + source /home/cloudbase/devstack/tools/install_prereqs.sh ++ [[ -n '' ]] ++ [[ -z /home/cloudbase/devstack ]] ++ PREREQ_RERUN_MARKER=/home/cloudbase/devstack/.prereqs ++ PREREQ_RERUN_HOURS=2 ++ PREREQ_RERUN_SECONDS=7200 +++ date +%s ++ NOW=1413070484 +++ head -1 /home/cloudbase/devstack/.prereqs ++ LAST_RUN=1413061287 ++ DELTA=9197 ++ [[ 9197 -lt 7200 ]] ++ export_proxy_variables ++ [[ -n '' ]] ++ [[ -n '' ]] ++ [[ -n '' ]] +++ get_packages general g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api ++++ set +o ++++ grep xtrace +++ local 'xtrace=set -o xtrace' +++ set +o xtrace ++ PACKAGES='bridge-utils pylint python-setuptools screen unzip wget psmisc git lsof openssh-server openssl python-virtualenv python-unittest2 iputils-ping wget curl tcpdump euca2ools tar python-cmd2 python-dev python2.7 bc gcc libffi-dev libxml2-dev python-eventlet python-routes python-greenlet python-sqlalchemy python-wsgiref python-pastedeploy python-xattr python-iso8601 python-lxml python-pastescript python-pastedeploy python-paste sqlite3 python-pysqlite2 python-sqlalchemy python-mysqldb python-webob python-greenlet python-routes libldap2-dev libsasl2-dev python-dateutil msgpack-python fping dnsmasq-base dnsmasq-utils conntrack kpartx parted iputils-arping python-mysqldb python-xattr python-lxml gawk iptables ebtables sqlite3 sudo pm-utils libjs-jquery-tablesorter vlan curl genisoimage socat python-mox python-paste python-migrate python-greenlet python-libxml2 python-routes python-numpy python-pastedeploy python-eventlet python-cheetah python-tempita python-sqlalchemy python-suds python-lockfile python-m2crypto python-boto python-kombu python-feedparser python-iso8601 python-qpid tgt lvm2 qemu-utils libpq-dev open-iscsi open-iscsi-utils libxslt1-dev ebtables iptables iputils-ping iputils-arping sudo python-boto python-iso8601 python-paste python-routes python-suds python-pastedeploy python-greenlet python-kombu python-eventlet python-sqlalchemy python-mysqldb python-pyudev python-qpid dnsmasq-base dnsmasq-utils sqlite3 vlan curl gcc libffi-dev memcached python-configobj python-coverage python-eventlet python-greenlet python-netifaces python-nose python-pastedeploy python-simplejson python-webob python-xattr sqlite3 xfsprogs libnspr4-dev pkg-config libxml2-dev libxslt-dev' ++ is_ubuntu ++ [[ -z deb ]] ++ '[' deb = deb ']' ++ grep -q dkms ++ echo bridge-utils pylint python-setuptools screen unzip wget psmisc git lsof openssh-server openssl python-virtualenv python-unittest2 iputils-ping wget curl tcpdump euca2ools tar python-cmd2 python-dev python2.7 bc gcc libffi-dev libxml2-dev python-eventlet python-routes python-greenlet python-sqlalchemy python-wsgiref python-pastedeploy python-xattr python-iso8601 python-lxml python-pastescript python-pastedeploy python-paste sqlite3 python-pysqlite2 python-sqlalchemy python-mysqldb python-webob python-greenlet python-routes libldap2-dev libsasl2-dev python-dateutil msgpack-python fping dnsmasq-base dnsmasq-utils conntrack kpartx parted iputils-arping python-mysqldb python-xattr python-lxml gawk iptables ebtables sqlite3 sudo pm-utils libjs-jquery-tablesorter vlan curl genisoimage socat python-mox python-paste 
python-migrate python-greenlet python-libxml2 python-routes python-numpy python-pastedeploy python-eventlet python-cheetah python-tempita python-sqlalchemy python-suds python-lockfile python-m2crypto python-boto python-kombu python-feedparser python-iso8601 python-qpid tgt lvm2 qemu-utils libpq-dev open-iscsi open-iscsi-utils libxslt1-dev ebtables iptables iputils-ping iputils-arping sudo python-boto python-iso8601 python-paste python-routes python-suds python-pastedeploy python-greenlet python-kombu python-eventlet python-sqlalchemy python-mysqldb python-pyudev python-qpid dnsmasq-base dnsmasq-utils sqlite3 vlan curl gcc libffi-dev memcached python-configobj python-coverage python-eventlet python-greenlet python-netifaces python-nose python-pastedeploy python-simplejson python-webob python-xattr sqlite3 xfsprogs libnspr4-dev pkg-config libxml2-dev libxslt-dev ++ install_package bridge-utils pylint python-setuptools screen unzip wget psmisc git lsof openssh-server openssl python-virtualenv python-unittest2 iputils-ping wget curl tcpdump euca2ools tar python-cmd2 python-dev python2.7 bc gcc libffi-dev libxml2-dev python-eventlet python-routes python-greenlet python-sqlalchemy python-wsgiref python-pastedeploy python-xattr python-iso8601 python-lxml python-pastescript python-pastedeploy python-paste sqlite3 python-pysqlite2 python-sqlalchemy python-mysqldb python-webob python-greenlet python-routes libldap2-dev libsasl2-dev python-dateutil msgpack-python fping dnsmasq-base dnsmasq-utils conntrack kpartx parted iputils-arping python-mysqldb python-xattr python-lxml gawk iptables ebtables sqlite3 sudo pm-utils libjs-jquery-tablesorter vlan curl genisoimage socat python-mox python-paste python-migrate python-greenlet python-libxml2 python-routes python-numpy python-pastedeploy python-eventlet python-cheetah python-tempita python-sqlalchemy python-suds python-lockfile python-m2crypto python-boto python-kombu python-feedparser python-iso8601 python-qpid tgt lvm2 qemu-utils libpq-dev open-iscsi open-iscsi-utils libxslt1-dev ebtables iptables iputils-ping iputils-arping sudo python-boto python-iso8601 python-paste python-routes python-suds python-pastedeploy python-greenlet python-kombu python-eventlet python-sqlalchemy python-mysqldb python-pyudev python-qpid dnsmasq-base dnsmasq-utils sqlite3 vlan curl gcc libffi-dev memcached python-configobj python-coverage python-eventlet python-greenlet python-netifaces python-nose python-pastedeploy python-simplejson python-webob python-xattr sqlite3 xfsprogs libnspr4-dev pkg-config libxml2-dev libxslt-dev +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ apt_get install bridge-utils pylint python-setuptools screen unzip wget psmisc git lsof openssh-server openssl python-virtualenv python-unittest2 iputils-ping wget curl tcpdump euca2ools tar python-cmd2 python-dev python2.7 bc gcc libffi-dev libxml2-dev python-eventlet python-routes python-greenlet python-sqlalchemy python-wsgiref python-pastedeploy python-xattr python-iso8601 python-lxml python-pastescript python-pastedeploy python-paste sqlite3 python-pysqlite2 python-sqlalchemy python-mysqldb python-webob python-greenlet python-routes libldap2-dev libsasl2-dev python-dateutil msgpack-python fping dnsmasq-base dnsmasq-utils conntrack kpartx parted iputils-arping python-mysqldb python-xattr python-lxml gawk iptables ebtables sqlite3 sudo pm-utils libjs-jquery-tablesorter vlan curl genisoimage socat python-mox python-paste python-migrate python-greenlet python-libxml2 
python-routes python-numpy python-pastedeploy python-eventlet python-cheetah python-tempita python-sqlalchemy python-suds python-lockfile python-m2crypto python-boto python-kombu python-feedparser python-iso8601 python-qpid tgt lvm2 qemu-utils libpq-dev open-iscsi open-iscsi-utils libxslt1-dev ebtables iptables iputils-ping iputils-arping sudo python-boto python-iso8601 python-paste python-routes python-suds python-pastedeploy python-greenlet python-kombu python-eventlet python-sqlalchemy python-mysqldb python-pyudev python-qpid dnsmasq-base dnsmasq-utils sqlite3 vlan curl gcc libffi-dev memcached python-configobj python-coverage python-eventlet python-greenlet python-netifaces python-nose python-pastedeploy python-simplejson python-webob python-xattr sqlite3 xfsprogs libnspr4-dev pkg-config libxml2-dev libxslt-dev +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install bridge-utils pylint python-setuptools screen unzip wget psmisc git lsof openssh-server openssl python-virtualenv python-unittest2 iputils-ping wget curl tcpdump euca2ools tar python-cmd2 python-dev python2.7 bc gcc libffi-dev libxml2-dev python-eventlet python-routes python-greenlet python-sqlalchemy python-wsgiref python-pastedeploy python-xattr python-iso8601 python-lxml python-pastescript python-pastedeploy python-paste sqlite3 python-pysqlite2 python-sqlalchemy python-mysqldb python-webob python-greenlet python-routes libldap2-dev libsasl2-dev python-dateutil msgpack-python fping dnsmasq-base dnsmasq-utils conntrack kpartx parted iputils-arping python-mysqldb python-xattr python-lxml gawk iptables ebtables sqlite3 sudo pm-utils libjs-jquery-tablesorter vlan curl genisoimage socat python-mox python-paste python-migrate python-greenlet python-libxml2 python-routes python-numpy python-pastedeploy python-eventlet python-cheetah python-tempita python-sqlalchemy python-suds python-lockfile python-m2crypto python-boto python-kombu python-feedparser python-iso8601 python-qpid tgt lvm2 qemu-utils libpq-dev open-iscsi open-iscsi-utils libxslt1-dev ebtables iptables iputils-ping iputils-arping sudo python-boto python-iso8601 python-paste python-routes python-suds python-pastedeploy python-greenlet python-kombu python-eventlet python-sqlalchemy python-mysqldb python-pyudev python-qpid dnsmasq-base dnsmasq-utils sqlite3 vlan curl gcc libffi-dev memcached python-configobj python-coverage python-eventlet python-greenlet python-netifaces python-nose python-pastedeploy python-simplejson python-webob python-xattr sqlite3 xfsprogs libnspr4-dev pkg-config libxml2-dev libxslt-dev ++ [[ -n False ]] ++ [[ False != \F\a\l\s\e ]] ++ date +%s ++ date + [[ False != \T\r\u\e ]] + /home/cloudbase/devstack/tools/install_pip.sh +++ dirname /home/cloudbase/devstack/tools/install_pip.sh ++ cd /home/cloudbase/devstack/tools ++ pwd + TOOLS_DIR=/home/cloudbase/devstack/tools ++ cd /home/cloudbase/devstack/tools/.. 
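The prerequisite step above is gated by a re-run marker ($TOP_DIR/.prereqs); since DELTA=9197 is not below 7200 seconds, the package list is reinstalled. A minimal sketch of that guard, reconstructed from the trace (the skip branch, the marker refresh, and the ENABLED_SERVICES variable are assumptions, not verbatim install_prereqs.sh code):

    TOP_DIR=${TOP_DIR:-/home/cloudbase/devstack}
    PREREQ_RERUN_MARKER=$TOP_DIR/.prereqs
    PREREQ_RERUN_SECONDS=$((2 * 60 * 60))                # PREREQ_RERUN_HOURS=2
    NOW=$(date +%s)
    LAST_RUN=$(head -1 "$PREREQ_RERUN_MARKER" 2>/dev/null || echo 0)
    DELTA=$((NOW - LAST_RUN))
    if [[ $DELTA -lt $PREREQ_RERUN_SECONDS ]]; then
        echo "Prereqs ran $DELTA seconds ago; skipping"  # early-exit branch is inferred
    else
        # get_packages/install_package are the DevStack helpers traced above
        install_package $(get_packages general "$ENABLED_SERVICES")
        date +%s > "$PREREQ_RERUN_MARKER"                # marker refresh is assumed
    fi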
++ pwd + TOP_DIR=/home/cloudbase/devstack + cd /home/cloudbase/devstack + source /home/cloudbase/devstack/functions ++++ dirname /home/cloudbase/devstack/functions +++ cd /home/cloudbase/devstack +++ pwd ++ FUNC_DIR=/home/cloudbase/devstack ++ source /home/cloudbase/devstack/functions-common ++++ set +o ++++ grep xtrace +++ XTRACE='set -o xtrace' +++ set +o xtrace +++ set +o +++ grep xtrace ++ XTRACE='set -o xtrace' ++ set +o xtrace + FILES=/home/cloudbase/devstack/files + PIP_GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py ++ basename https://bootstrap.pypa.io/get-pip.py + LOCAL_PIP=/home/cloudbase/devstack/files/get-pip.py + GetDistro + GetOSVersion ++ which sw_vers + [[ -x '' ]] ++ which lsb_release + [[ -x /usr/bin/lsb_release ]] ++ lsb_release -i -s + os_VENDOR=Ubuntu ++ lsb_release -r -s + os_RELEASE=12.04 + os_UPDATE= + os_PACKAGE=rpm + [[ Debian,Ubuntu,LinuxMint =~ Ubuntu ]] + os_PACKAGE=deb ++ lsb_release -c -s + os_CODENAME=precise + export os_VENDOR os_RELEASE os_UPDATE os_PACKAGE os_CODENAME + [[ Ubuntu =~ (Ubuntu) ]] + DISTRO=precise + export DISTRO + echo 'Distro: precise' + get_versions ++ which pip + PIP=/usr/local/bin/pip + [[ -n /usr/local/bin/pip ]] ++ /usr/local/bin/pip --version ++ awk '{ print $2}' + PIP_VERSION=1.5.6 + echo 'pip: 1.5.6' + uninstall_package python-pip + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + apt_get purge python-pip ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes purge python-pip + install_get_pip + [[ ! -r /home/cloudbase/devstack/files/get-pip.py ]] + sudo -E python /home/cloudbase/devstack/files/get-pip.py + get_versions ++ which pip + PIP=/usr/local/bin/pip + [[ -n /usr/local/bin/pip ]] ++ /usr/local/bin/pip --version ++ awk '{ print $2}' + PIP_VERSION=1.5.6 + echo 'pip: 1.5.6' + /home/cloudbase/devstack/tools/fixup_stuff.sh +++ dirname /home/cloudbase/devstack/tools/fixup_stuff.sh ++ cd /home/cloudbase/devstack/tools ++ pwd + TOOLS_DIR=/home/cloudbase/devstack/tools ++ cd /home/cloudbase/devstack/tools/.. 
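install_pip.sh above replaces the distro pip with the upstream one via get-pip.py. A condensed sketch of those steps; the curl download is an assumption, since it is skipped in this run because files/get-pip.py is already present:

    FILES=${FILES:-/home/cloudbase/devstack/files}
    PIP_GET_PIP_URL=https://bootstrap.pypa.io/get-pip.py
    LOCAL_PIP=$FILES/$(basename "$PIP_GET_PIP_URL")
    pip --version | awk '{ print $2 }'                   # report the current pip (1.5.6 here)
    sudo DEBIAN_FRONTEND=noninteractive apt-get --assume-yes purge python-pip
    if [[ ! -r $LOCAL_PIP ]]; then
        curl -o "$LOCAL_PIP" "$PIP_GET_PIP_URL"          # download step not shown in this trace
    fi
    sudo -E python "$LOCAL_PIP"                          # reinstall pip from upstream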
++ pwd + TOP_DIR=/home/cloudbase/devstack + cd /home/cloudbase/devstack + source /home/cloudbase/devstack/functions ++++ dirname /home/cloudbase/devstack/functions +++ cd /home/cloudbase/devstack +++ pwd ++ FUNC_DIR=/home/cloudbase/devstack ++ source /home/cloudbase/devstack/functions-common ++++ set +o ++++ grep xtrace +++ XTRACE='set -o xtrace' +++ set +o xtrace +++ set +o +++ grep xtrace ++ XTRACE='set -o xtrace' ++ set +o xtrace + FILES=/home/cloudbase/devstack/files + pip_install 'prettytable>0.7' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.33xnW 'prettytable>0.7' + sudo rm -rf /tmp/pip-build.33xnW ++ get_package_path prettytable ++ local package=prettytable +++ python -c 'import os; import prettytable; print(os.path.split(os.path.realpath(prettytable.__file__))[0])' ++ echo /usr/local/lib/python2.7/dist-packages + PACKAGE_DIR=/usr/local/lib/python2.7/dist-packages ++ echo /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info + dir=/usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info + [[ -d /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info ]] + sudo chmod +r /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info/PKG-INFO /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info/SOURCES.txt /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info/dependency_links.txt /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info/installed-files.txt /usr/local/lib/python2.7/dist-packages/prettytable-0.7.2.egg-info/top_level.txt + pip_install httplib2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.3g7Jr httplib2 + sudo rm -rf /tmp/pip-build.3g7Jr ++ get_package_path httplib2 ++ local package=httplib2 +++ python -c 'import os; import httplib2; print(os.path.split(os.path.realpath(httplib2.__file__))[0])' ++ echo /usr/local/lib/python2.7/dist-packages/httplib2 + PACKAGE_DIR=/usr/local/lib/python2.7/dist-packages/httplib2 ++ echo '/usr/local/lib/python2.7/dist-packages/httplib2-0.8*' + dir='/usr/local/lib/python2.7/dist-packages/httplib2-0.8*' + [[ -d /usr/local/lib/python2.7/dist-packages/httplib2-0.8* ]] + [[ precise =~ (precise) ]] ++ trueorfalse False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + UPGRADE_KERNEL=False + [[ False == \T\r\u\e ]] + [[ precise =~ (rhel6) ]] + install_rpc_backend + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 ++ mktemp + tfile=/tmp/tmp.AEuhHF5Hmk + install_package rabbitmq-server + cat /tmp/tmp.AEuhHF5Hmk + rm -f /tmp/tmp.AEuhHF5Hmk + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_database + install_database_mysql + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + cat + sudo debconf-set-selections + [[ ! 
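The pip_install wrapper that appears throughout the rest of this log always runs pip as root with the proxy variables cleared, a shared download cache, and a throwaway build directory. A simplified sketch (the mktemp call is an assumption; the trace only shows the resulting /tmp/pip-build.* paths):

    function pip_install {
        local build_dir
        build_dir=$(mktemp -d /tmp/pip-build.XXXXXX)
        sudo PIP_DOWNLOAD_CACHE=/var/cache/pip \
             HTTP_PROXY= HTTPS_PROXY= NO_PROXY= \
             /usr/local/bin/pip install --build="$build_dir" "$@"
        sudo rm -rf "$build_dir"
    }

    pip_install 'prettytable>0.7'
    pip_install httplib2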
-e /home/cloudbase/.my.cnf ]] + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + [[ precise =~ (rhel7) ]] + install_package mysql-server ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install mysql-server ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install mysql-server + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_neutron_agent_packages + is_service_enabled q-agt q-dhcp q-l3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + neutron_plugin_install_agent_packages + _neutron_ovs_base_install_agent_packages + local kernel_version + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' ++ cat /proc/version ++ cut -d ' ' -f3 + kernel_version=3.11.0-26-generic + ovs_packages='make fakeroot dkms openvswitch-switch' ++ echo 3.11.0-26-generic ++ cut -d. -f1-2 + kernel_major_minor=3.11 ++ vercmp_numbers 3.11 3.13 ++ typeset v1=3.11 v2=3.13 sep ++ typeset -a ver1 ver2 ++ IFS=. ++ read -ra ver1 ++ IFS=. ++ read -ra ver2 ++ _vercmp_r 2 3 11 3 13 ++ typeset sep ++ ver1=() ++ ver2=() ++ typeset -a ver1 ver2 ++ sep=2 ++ shift ++ ver1=("${@:1:sep}") ++ ver2=("${@:sep+1}") ++ (( ver1 > ver2 )) ++ (( ver2 > ver1 )) ++ (( sep <= 1 )) ++ _vercmp_r 1 11 13 ++ typeset sep ++ ver1=() ++ ver2=() ++ typeset -a ver1 ver2 ++ sep=1 ++ shift ++ ver1=("${@:1:sep}") ++ ver2=("${@:sep+1}") ++ (( ver1 > ver2 )) ++ (( ver2 > ver1 )) ++ echo -1 ++ return 0 + '[' -1 -lt 0 ']' + ovs_packages='make fakeroot dkms openvswitch-switch openvswitch-datapath-dkms' + ovs_packages='make fakeroot dkms openvswitch-switch openvswitch-datapath-dkms linux-headers-3.11.0-26-generic' + install_package make fakeroot dkms openvswitch-switch openvswitch-datapath-dkms linux-headers-3.11.0-26-generic ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install make fakeroot dkms openvswitch-switch openvswitch-datapath-dkms linux-headers-3.11.0-26-generic ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install make fakeroot dkms openvswitch-switch openvswitch-datapath-dkms linux-headers-3.11.0-26-generic + is_service_enabled q-lbaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + neutron_agent_lbaas_install_agent_packages + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + install_package haproxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install haproxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install haproxy + TRACK_DEPENDS=False + [[ False = True ]] + echo_summary 'Installing OpenStack project source' + [[ -t 3 ]] + echo -e Installing OpenStack project source + install_infra + git_clone git://git.openstack.org/openstack/requirements.git /opt/stack/requirements stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/requirements.git + GIT_DEST=/opt/stack/requirements + GIT_REF=stable/icehouse ++ trueorfalse False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local 
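The Open vSwitch agent packages above are chosen by comparing the running kernel with 3.13: older kernels (3.11.0-26 here) pull in the out-of-tree datapath DKMS module and matching headers. A sketch reconstructed from the trace, using the vercmp_numbers and install_package helpers shown above:

    kernel_version=$(cut -d ' ' -f 3 /proc/version)
    kernel_major_minor=$(echo "$kernel_version" | cut -d . -f 1-2)
    ovs_packages="make fakeroot dkms openvswitch-switch"
    if [ "$(vercmp_numbers "$kernel_major_minor" "3.13")" -lt 0 ]; then
        # pre-3.13 kernels need the out-of-tree datapath module plus headers
        ovs_packages="$ovs_packages openvswitch-datapath-dkms linux-headers-$kernel_version"
    fi
    install_package $ovs_packages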
orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! -d /opt/stack/requirements ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/requirements + git show --oneline + head -1 + cd /home/cloudbase/devstack + git_clone git://git.openstack.org/openstack-dev/pbr.git /opt/stack/pbr master + GIT_REMOTE=git://git.openstack.org/openstack-dev/pbr.git + GIT_DEST=/opt/stack/pbr + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/pbr ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/pbr + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/pbr + local project_dir=/opt/stack/pbr + setup_package_with_req_sync /opt/stack/pbr + local project_dir=/opt/stack/pbr + local flags= ++ cd /opt/stack/pbr ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/pbr + setup_package /opt/stack/pbr + local project_dir=/opt/stack/pbr + local flags= + pip_install /opt/stack/pbr ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.ieGi4 /opt/stack/pbr + sudo rm -rf /tmp/pip-build.ieGi4 + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/pbr + git reset --hard + install_oslo + cleanup_oslo + python -c 'import oslo.config' + git_clone git://git.openstack.org/openstack/cliff.git /opt/stack/cliff master + GIT_REMOTE=git://git.openstack.org/openstack/cliff.git + GIT_DEST=/opt/stack/cliff + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/cliff ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/cliff + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/cliff + local project_dir=/opt/stack/cliff + setup_package_with_req_sync /opt/stack/cliff + local project_dir=/opt/stack/cliff + local flags= ++ cd /opt/stack/cliff ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/cliff + setup_package /opt/stack/cliff + local project_dir=/opt/stack/cliff + local flags= + pip_install /opt/stack/cliff ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.4LOy8 /opt/stack/cliff + sudo rm -rf /tmp/pip-build.4LOy8 + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/cliff + git reset --hard + git_clone git://git.openstack.org/openstack/oslo.config.git /opt/stack/oslo.config master + GIT_REMOTE=git://git.openstack.org/openstack/oslo.config.git + GIT_DEST=/opt/stack/oslo.config + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
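Every library below (pbr, cliff, the oslo.* projects, pycadf, stevedore, taskflow, and the clients) goes through the same clone-and-install cycle: sync its requirements against the stable/icehouse global list with update.py, pip-install it, then git reset --hard to drop the temporary requirement edits. A condensed sketch under an illustrative name (setup_install_sketch is not a real DevStack function; error handling omitted):

    function setup_install_sketch {
        local project_dir=$1
        local update_requirements
        update_requirements=$(cd "$project_dir" && git diff --exit-code >/dev/null || echo "changed")
        if [[ $update_requirements != "changed" ]]; then
            # pin the project's requirements to the global stable/icehouse list
            (cd /opt/stack/requirements && python update.py "$project_dir")
        fi
        pip_install "$project_dir"                       # wrapper sketched earlier
        if [[ $update_requirements != "changed" ]]; then
            (cd "$project_dir" && git reset --hard)      # drop update.py's edits again
        fi
    }

    setup_install_sketch /opt/stack/oslo.config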
-d /opt/stack/oslo.config ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/oslo.config + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/oslo.config + local project_dir=/opt/stack/oslo.config + setup_package_with_req_sync /opt/stack/oslo.config + local project_dir=/opt/stack/oslo.config + local flags= ++ cd /opt/stack/oslo.config ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/oslo.config + setup_package /opt/stack/oslo.config + local project_dir=/opt/stack/oslo.config + local flags= + pip_install /opt/stack/oslo.config ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.xPe3i /opt/stack/oslo.config + sudo rm -rf /tmp/pip-build.xPe3i + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/oslo.config + git reset --hard + git_clone git://git.openstack.org/openstack/oslo.messaging.git /opt/stack/oslo.messaging master + GIT_REMOTE=git://git.openstack.org/openstack/oslo.messaging.git + GIT_DEST=/opt/stack/oslo.messaging + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/oslo.messaging ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/oslo.messaging + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/oslo.messaging + local project_dir=/opt/stack/oslo.messaging + setup_package_with_req_sync /opt/stack/oslo.messaging + local project_dir=/opt/stack/oslo.messaging + local flags= ++ cd /opt/stack/oslo.messaging ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/oslo.messaging + setup_package /opt/stack/oslo.messaging + local project_dir=/opt/stack/oslo.messaging + local flags= + pip_install /opt/stack/oslo.messaging ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.J7TOc /opt/stack/oslo.messaging + sudo rm -rf /tmp/pip-build.J7TOc + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/oslo.messaging + git reset --hard + git_clone git://git.openstack.org/openstack/oslo.rootwrap.git /opt/stack/oslo.rootwrap master + GIT_REMOTE=git://git.openstack.org/openstack/oslo.rootwrap.git + GIT_DEST=/opt/stack/oslo.rootwrap + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/oslo.rootwrap ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/oslo.rootwrap + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/oslo.rootwrap + local project_dir=/opt/stack/oslo.rootwrap + setup_package_with_req_sync /opt/stack/oslo.rootwrap + local project_dir=/opt/stack/oslo.rootwrap + local flags= ++ cd /opt/stack/oslo.rootwrap ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/oslo.rootwrap + setup_package /opt/stack/oslo.rootwrap + local project_dir=/opt/stack/oslo.rootwrap + local flags= + pip_install /opt/stack/oslo.rootwrap ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.ouaDo /opt/stack/oslo.rootwrap + sudo rm -rf /tmp/pip-build.ouaDo + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/oslo.rootwrap + git reset --hard + git_clone git://git.openstack.org/openstack/oslo.vmware.git /opt/stack/oslo.vmware master + GIT_REMOTE=git://git.openstack.org/openstack/oslo.vmware.git + GIT_DEST=/opt/stack/oslo.vmware + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/oslo.vmware ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/oslo.vmware + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/oslo.vmware + local project_dir=/opt/stack/oslo.vmware + setup_package_with_req_sync /opt/stack/oslo.vmware + local project_dir=/opt/stack/oslo.vmware + local flags= ++ cd /opt/stack/oslo.vmware ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/oslo.vmware + setup_package /opt/stack/oslo.vmware + local project_dir=/opt/stack/oslo.vmware + local flags= + pip_install /opt/stack/oslo.vmware ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.7qX95 /opt/stack/oslo.vmware + sudo rm -rf /tmp/pip-build.7qX95 + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/oslo.vmware + git reset --hard + git_clone git://git.openstack.org/openstack/pycadf.git /opt/stack/pycadf master + GIT_REMOTE=git://git.openstack.org/openstack/pycadf.git + GIT_DEST=/opt/stack/pycadf + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/pycadf ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/pycadf + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/pycadf + local project_dir=/opt/stack/pycadf + setup_package_with_req_sync /opt/stack/pycadf + local project_dir=/opt/stack/pycadf + local flags= ++ cd /opt/stack/pycadf ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/pycadf + setup_package /opt/stack/pycadf + local project_dir=/opt/stack/pycadf + local flags= + pip_install /opt/stack/pycadf ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.RqRjh /opt/stack/pycadf + sudo rm -rf /tmp/pip-build.RqRjh + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/pycadf + git reset --hard + git_clone git://git.openstack.org/openstack/stevedore.git /opt/stack/stevedore master + GIT_REMOTE=git://git.openstack.org/openstack/stevedore.git + GIT_DEST=/opt/stack/stevedore + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/stevedore ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/stevedore + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/stevedore + local project_dir=/opt/stack/stevedore + setup_package_with_req_sync /opt/stack/stevedore + local project_dir=/opt/stack/stevedore + local flags= ++ cd /opt/stack/stevedore ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/stevedore + setup_package /opt/stack/stevedore + local project_dir=/opt/stack/stevedore + local flags= + pip_install /opt/stack/stevedore ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.hEpsx /opt/stack/stevedore + sudo rm -rf /tmp/pip-build.hEpsx + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/stevedore + git reset --hard + git_clone git://git.openstack.org/openstack/taskflow.git /opt/stack/taskflow master + GIT_REMOTE=git://git.openstack.org/openstack/taskflow.git + GIT_DEST=/opt/stack/taskflow + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/taskflow ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/taskflow + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_install /opt/stack/taskflow + local project_dir=/opt/stack/taskflow + setup_package_with_req_sync /opt/stack/taskflow + local project_dir=/opt/stack/taskflow + local flags= ++ cd /opt/stack/taskflow ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/taskflow + setup_package /opt/stack/taskflow + local project_dir=/opt/stack/taskflow + local flags= + pip_install /opt/stack/taskflow ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.YxvVL /opt/stack/taskflow + sudo rm -rf /tmp/pip-build.YxvVL + [[ '' == \-\e ]] + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/taskflow + git reset --hard + is_service_enabled stackforge_libs ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + install_keystoneclient + git_clone git://git.openstack.org/openstack/python-keystoneclient.git /opt/stack/python-keystoneclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-keystoneclient.git + GIT_DEST=/opt/stack/python-keystoneclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/python-keystoneclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-keystoneclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-keystoneclient + local project_dir=/opt/stack/python-keystoneclient + setup_package_with_req_sync /opt/stack/python-keystoneclient -e + local project_dir=/opt/stack/python-keystoneclient + local flags=-e ++ cd /opt/stack/python-keystoneclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-keystoneclient + setup_package /opt/stack/python-keystoneclient -e + local project_dir=/opt/stack/python-keystoneclient + local flags=-e + pip_install -e /opt/stack/python-keystoneclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.T0TnR -e /opt/stack/python-keystoneclient + sudo rm -rf /tmp/pip-build.T0TnR + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-keystoneclient/python_keystoneclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-keystoneclient/python_keystoneclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-keystoneclient/python_keystoneclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-keystoneclient + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/python-keystoneclient/tools/keystone.bash_completion /etc/bash_completion.d/keystone.bash_completion + install_glanceclient + git_clone git://git.openstack.org/openstack/python-glanceclient.git /opt/stack/python-glanceclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-glanceclient.git + GIT_DEST=/opt/stack/python-glanceclient + 
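The client libraries are installed in editable (develop) mode; afterwards the root-owned egg-info directory is handed back to the stack user and the bash completion file is installed, as just traced for python-keystoneclient. Condensed:

    pip_install -e /opt/stack/python-keystoneclient
    sudo chown -R cloudbase /opt/stack/python-keystoneclient/python_keystoneclient.egg-info
    sudo install -D -m 0644 -o cloudbase \
        /opt/stack/python-keystoneclient/tools/keystone.bash_completion \
        /etc/bash_completion.d/keystone.bash_completion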
GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/python-glanceclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-glanceclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-glanceclient + local project_dir=/opt/stack/python-glanceclient + setup_package_with_req_sync /opt/stack/python-glanceclient -e + local project_dir=/opt/stack/python-glanceclient + local flags=-e ++ cd /opt/stack/python-glanceclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-glanceclient + setup_package /opt/stack/python-glanceclient -e + local project_dir=/opt/stack/python-glanceclient + local flags=-e + pip_install -e /opt/stack/python-glanceclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.wjlm5 -e /opt/stack/python-glanceclient + sudo rm -rf /tmp/pip-build.wjlm5 + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-glanceclient/python_glanceclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-glanceclient/python_glanceclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-glanceclient/python_glanceclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-glanceclient + git reset --hard + install_cinderclient + git_clone git://git.openstack.org/openstack/python-cinderclient.git /opt/stack/python-cinderclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-cinderclient.git + GIT_DEST=/opt/stack/python-cinderclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/python-cinderclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-cinderclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-cinderclient + local project_dir=/opt/stack/python-cinderclient + setup_package_with_req_sync /opt/stack/python-cinderclient -e + local project_dir=/opt/stack/python-cinderclient + local flags=-e ++ cd /opt/stack/python-cinderclient ++ git diff --exit-code ++ echo changed + local update_requirements=changed + [[ changed != \c\h\a\n\g\e\d ]] + setup_package /opt/stack/python-cinderclient -e + local project_dir=/opt/stack/python-cinderclient + local flags=-e + pip_install -e /opt/stack/python-cinderclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.4zsHF -e /opt/stack/python-cinderclient + sudo rm -rf /tmp/pip-build.4zsHF + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-cinderclient/python_cinderclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-cinderclient/python_cinderclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-cinderclient/python_cinderclient.egg-info + '[' True = True ']' + [[ changed != \c\h\a\n\g\e\d ]] + sudo install -D -m 0644 -o cloudbase /opt/stack/python-cinderclient/tools/cinder.bash_completion /etc/bash_completion.d/cinder.bash_completion + install_novaclient + git_clone git://git.openstack.org/openstack/python-novaclient.git /opt/stack/python-novaclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-novaclient.git + GIT_DEST=/opt/stack/python-novaclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
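python-cinderclient above is the one project with local modifications: git diff --exit-code fails, update_requirements becomes "changed", and both the update.py sync and the final git reset --hard are skipped so the local edits survive. The detection idiom as a sketch (the >/dev/null placement is assumed):

    update_requirements=$(cd /opt/stack/python-cinderclient && git diff --exit-code >/dev/null || echo "changed")
    if [[ $update_requirements == "changed" ]]; then
        echo "local changes detected; leaving requirements and the working tree untouched"
    fi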
-d /opt/stack/python-novaclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-novaclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-novaclient + local project_dir=/opt/stack/python-novaclient + setup_package_with_req_sync /opt/stack/python-novaclient -e + local project_dir=/opt/stack/python-novaclient + local flags=-e ++ cd /opt/stack/python-novaclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-novaclient + setup_package /opt/stack/python-novaclient -e + local project_dir=/opt/stack/python-novaclient + local flags=-e + pip_install -e /opt/stack/python-novaclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.65XWq -e /opt/stack/python-novaclient + sudo rm -rf /tmp/pip-build.65XWq + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-novaclient/python_novaclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-novaclient/python_novaclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-novaclient/python_novaclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-novaclient + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/python-novaclient/tools/nova.bash_completion /etc/bash_completion.d/nova.bash_completion + is_service_enabled swift glance horizon ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_swiftclient + git_clone git://git.openstack.org/openstack/python-swiftclient.git /opt/stack/python-swiftclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-swiftclient.git + GIT_DEST=/opt/stack/python-swiftclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/python-swiftclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-swiftclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-swiftclient + local project_dir=/opt/stack/python-swiftclient + setup_package_with_req_sync /opt/stack/python-swiftclient -e + local project_dir=/opt/stack/python-swiftclient + local flags=-e ++ cd /opt/stack/python-swiftclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-swiftclient + setup_package /opt/stack/python-swiftclient -e + local project_dir=/opt/stack/python-swiftclient + local flags=-e + pip_install -e /opt/stack/python-swiftclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.Fsspf -e /opt/stack/python-swiftclient + sudo rm -rf /tmp/pip-build.Fsspf + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-swiftclient/python_swiftclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-swiftclient/python_swiftclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-swiftclient/python_swiftclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-swiftclient + git reset --hard + is_service_enabled neutron nova horizon ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_neutronclient + git_clone git://git.openstack.org/openstack/python-neutronclient.git /opt/stack/python-neutronclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-neutronclient.git + GIT_DEST=/opt/stack/python-neutronclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/python-neutronclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-neutronclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-neutronclient + local project_dir=/opt/stack/python-neutronclient + setup_package_with_req_sync /opt/stack/python-neutronclient -e + local project_dir=/opt/stack/python-neutronclient + local flags=-e ++ cd /opt/stack/python-neutronclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-neutronclient + setup_package /opt/stack/python-neutronclient -e + local project_dir=/opt/stack/python-neutronclient + local flags=-e + pip_install -e /opt/stack/python-neutronclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.ttf07 -e /opt/stack/python-neutronclient + sudo rm -rf /tmp/pip-build.ttf07 + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-neutronclient/python_neutronclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-neutronclient/python_neutronclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-neutronclient/python_neutronclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-neutronclient + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/python-neutronclient/tools/neutron.bash_completion /etc/bash_completion.d/neutron.bash_completion + is_service_enabled heat horizon ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_heatclient + git_clone git://git.openstack.org/openstack/python-heatclient.git /opt/stack/python-heatclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-heatclient.git + GIT_DEST=/opt/stack/python-heatclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/python-heatclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-heatclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-heatclient + local project_dir=/opt/stack/python-heatclient + setup_package_with_req_sync /opt/stack/python-heatclient -e + local project_dir=/opt/stack/python-heatclient + local flags=-e ++ cd /opt/stack/python-heatclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-heatclient + setup_package /opt/stack/python-heatclient -e + local project_dir=/opt/stack/python-heatclient + local flags=-e + pip_install -e /opt/stack/python-heatclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.sDAwd -e /opt/stack/python-heatclient + sudo rm -rf /tmp/pip-build.sDAwd + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-heatclient/python_heatclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-heatclient/python_heatclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-heatclient/python_heatclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-heatclient + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/python-heatclient/tools/heat.bash_completion /etc/bash_completion.d/heat.bash_completion + git_clone git://git.openstack.org/openstack/python-openstackclient.git /opt/stack/python-openstackclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-openstackclient.git + GIT_DEST=/opt/stack/python-openstackclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! 
-d /opt/stack/python-openstackclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-openstackclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/python-openstackclient + local project_dir=/opt/stack/python-openstackclient + setup_package_with_req_sync /opt/stack/python-openstackclient -e + local project_dir=/opt/stack/python-openstackclient + local flags=-e ++ cd /opt/stack/python-openstackclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-openstackclient + setup_package /opt/stack/python-openstackclient -e + local project_dir=/opt/stack/python-openstackclient + local flags=-e + pip_install -e /opt/stack/python-openstackclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.iIkok -e /opt/stack/python-openstackclient + sudo rm -rf /tmp/pip-build.iIkok + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-openstackclient/python_openstackclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-openstackclient/python_openstackclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-openstackclient/python_openstackclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-openstackclient + git reset --hard + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_keystone + is_service_enabled ldap ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + [[ memcache = \m\e\m\c\a\c\h\e ]] + install_package memcached ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install memcached ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install memcached + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + install_package python-memcache ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install python-memcache ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install python-memcache + git_clone git://git.openstack.org/openstack/keystone.git /opt/stack/keystone stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/keystone.git + GIT_DEST=/opt/stack/keystone + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! 
-d /opt/stack/keystone ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/keystone + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/keystone + local project_dir=/opt/stack/keystone + setup_package_with_req_sync /opt/stack/keystone -e + local project_dir=/opt/stack/keystone + local flags=-e ++ cd /opt/stack/keystone ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/keystone + setup_package /opt/stack/keystone -e + local project_dir=/opt/stack/keystone + local flags=-e + pip_install -e /opt/stack/keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.1wjJc -e /opt/stack/keystone + sudo rm -rf /tmp/pip-build.1wjJc + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/keystone/keystone.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/keystone/keystone.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/keystone/keystone.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/keystone + git reset --hard + is_apache_enabled_service key + services=key + for service in '${services}' + [[ ,, =~ ,key, ]] + return 1 + configure_keystone + [[ ! -d /etc/keystone ]] + sudo chown cloudbase /etc/keystone + [[ /etc/keystone != \/\o\p\t\/\s\t\a\c\k\/\k\e\y\s\t\o\n\e\/\e\t\c ]] + cp -p /opt/stack/keystone/etc/keystone.conf.sample /etc/keystone/keystone.conf + chmod 600 /etc/keystone/keystone.conf + cp -p /opt/stack/keystone/etc/policy.json /etc/keystone + [[ -f /opt/stack/keystone/etc/keystone-paste.ini ]] + cp -p /opt/stack/keystone/etc/keystone-paste.ini /etc/keystone/keystone-paste.ini + [[ -f /etc/keystone/keystone-paste.ini ]] + iniset /etc/keystone/keystone.conf paste_deploy config_file /etc/keystone/keystone-paste.ini ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled ldap ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + [[ kvs,ldap,pam,sql =~ sql ]] + iniset /etc/keystone/keystone.conf identity driver keystone.identity.backends.sql.Identity ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ kvs,ldap,sql =~ sql ]] + iniset /etc/keystone/keystone.conf assignment driver keystone.assignment.backends.sql.Assignment ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/keystone/keystone.conf DEFAULT public_endpoint 'http://10.14.0.26:%(public_port)s/' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/keystone/keystone.conf DEFAULT admin_endpoint 'http://10.14.0.26:%(admin_port)s/' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/keystone/keystone.conf DEFAULT admin_bind_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_ssl_enabled_service key + local services=key + local service= + for service in '${services}' + [[ ,, =~ ,key, ]] + return 1 + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + iniset /etc/keystone/keystone.conf DEFAULT admin_token Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ PKI = \U\U\I\D ]] ++ database_connection_url keystone ++ local db=keystone ++ 
database_connection_url_mysql keystone ++ local db=keystone ++ echo 'mysql://root:Passw0rd@127.0.0.1/keystone?charset=utf8' + iniset /etc/keystone/keystone.conf database connection 'mysql://root:Passw0rd@127.0.0.1/keystone?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/keystone/keystone.conf ec2 driver keystone.contrib.ec2.backends.sql.Ec2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ memcache = \s\q\l ]] + [[ memcache = \m\e\m\c\a\c\h\e ]] + iniset /etc/keystone/keystone.conf token driver keystone.token.backends.memcache.Token ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ sql = \s\q\l ]] + iniset /etc/keystone/keystone.conf catalog driver keystone.catalog.backends.sql.Catalog ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/keystone/keystone.conf catalog template_file ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False '!=' False ']' + '[' False == True ']' + is_apache_enabled_service key + services=key + for service in '${services}' + [[ ,, =~ ,key, ]] + return 1 + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_swift + git_clone git://git.openstack.org/openstack/swift.git /opt/stack/swift stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/swift.git + GIT_DEST=/opt/stack/swift + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! -d /opt/stack/swift ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/swift + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/swift + local project_dir=/opt/stack/swift + setup_package_with_req_sync /opt/stack/swift -e + local project_dir=/opt/stack/swift + local flags=-e ++ cd /opt/stack/swift ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/swift + setup_package /opt/stack/swift -e + local project_dir=/opt/stack/swift + local flags=-e + pip_install -e /opt/stack/swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.RBxmR -e /opt/stack/swift + sudo rm -rf /tmp/pip-build.RBxmR + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/swift/swift.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/swift/swift.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/swift/swift.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/swift + git reset --hard + is_apache_enabled_service swift + services=swift + for service in '${services}' + [[ ,, =~ ,swift, ]] + return 1 + configure_swift + local swift_pipeline=crossdomain + local node_number + local swift_node_config + local swift_log_dir + swift-init --run-dir=/opt/stack/data/swift/run all stop + true + sudo mkdir -p /etc/swift/object-server /etc/swift/container-server /etc/swift/account-server + sudo chown -R cloudbase: /etc/swift + [[ /etc/swift != \/\e\t\c\/\s\w\i\f\t ]] + sed -e ' s/%GROUP%//; s/%USER%/cloudbase/; s,%SWIFT_DATA_DIR%,/opt/stack/data/swift,; ' 
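Condensing the keystone.conf edits above: identity, assignment, catalog, and ec2 state go to MySQL while tokens use the memcache backend, with the connection string built by database_connection_url. The values below are copied from the trace; BASE_SQL_CONN is only an illustrative shorthand, and iniset is the DevStack ini helper used throughout this log:

    BASE_SQL_CONN="mysql://root:Passw0rd@127.0.0.1"
    iniset /etc/keystone/keystone.conf database connection "${BASE_SQL_CONN}/keystone?charset=utf8"
    iniset /etc/keystone/keystone.conf token driver keystone.token.backends.memcache.Token
    iniset /etc/keystone/keystone.conf catalog driver keystone.catalog.backends.sql.Catalog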
/home/cloudbase/devstack/files/swift/rsyncd.conf + sudo tee /etc/rsyncd.conf + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo sed -i '/^RSYNC_ENABLE=false/ { s/false/true/ }' /etc/default/rsync + SWIFT_CONFIG_PROXY_SERVER=/etc/swift/proxy-server.conf + cp /opt/stack/swift/etc/proxy-server.conf-sample /etc/swift/proxy-server.conf + cp /opt/stack/swift/etc/container-sync-realms.conf-sample /etc/swift/container-sync-realms.conf + iniuncomment /etc/swift/proxy-server.conf DEFAULT user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf DEFAULT user cloudbase ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf DEFAULT swift_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf DEFAULT swift_dir /etc/swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf DEFAULT workers ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf DEFAULT workers 1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf DEFAULT log_level ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf DEFAULT log_level DEBUG ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf DEFAULT bind_port ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf DEFAULT bind_port 8080 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf app:proxy-server node_timeout 120 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf app:proxy-server conn_timeout 20 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/swift/proxy-server.conf filter:ceilometer 'set log_level' WARN ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:ceilometer use egg:ceilometer#swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + SWIFT_EXTRAS_MIDDLEWARE_LAST=' ceilometer' + iniset /etc/swift/proxy-server.conf filter:proxy-logging reveal_sensitive_prefix 12 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled swift3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + swift_pipeline+=' authtoken keystoneauth tempauth ' + sed -i '/^pipeline/ { s/tempauth/crossdomain authtoken keystoneauth tempauth formpost staticweb/ ;}' /etc/swift/proxy-server.conf + sed -i '/^pipeline/ { s/proxy-server/ ceilometer proxy-server/ ; }' /etc/swift/proxy-server.conf + iniuncomment /etc/swift/proxy-server.conf filter:tempauth account_autocreate ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf app:proxy-server account_autocreate true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf filter:tempauth reseller_prefix ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf 
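
The two sed -i edits above splice extra middleware into the proxy pipeline: crossdomain, authtoken and keystoneauth are inserted ahead of tempauth (with formpost and staticweb after it), and the ceilometer filter is placed in front of the proxy-server app. With a hypothetical, shortened pipeline line the effect looks like this:

pipeline='pipeline = healthcheck cache tempauth proxy-logging proxy-server'
echo "$pipeline" |
    sed 's/tempauth/crossdomain authtoken keystoneauth tempauth formpost staticweb/' |
    sed 's/proxy-server/ ceilometer proxy-server/'
# pipeline = healthcheck cache crossdomain authtoken keystoneauth tempauth
#            formpost staticweb proxy-logging  ceilometer proxy-server
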
filter:tempauth reseller_prefix TEMPAUTH ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:crossdomain use egg:swift#crossdomain ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sed -i '/^# \[filter:authtoken\]/,/^# \[filter:keystoneauth\]$/ s/^#[ \t]*//' /etc/swift/proxy-server.conf + iniset /etc/swift/proxy-server.conf filter:authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken auth_uri http://10.14.0.26:5000/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken admin_user swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken signing_dir /var/cache/swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:authtoken log_name swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf filter:keystoneauth use ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/proxy-server.conf filter:keystoneauth operator_roles ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:keystoneauth operator_roles 'Member, admin' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled swift3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + cp /opt/stack/swift/etc/swift.conf-sample /etc/swift/swift.conf + iniset /etc/swift/swift.conf swift-hash swift_hash_path_suffix 66a3d6b56c1f479c8b4e70ab5d2014f6 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + for node_number in '${SWIFT_REPLICAS_SEQ}' + swift_node_config=/etc/swift/object-server/1.conf + cp /opt/stack/swift/etc/object-server.conf-sample /etc/swift/object-server/1.conf + generate_swift_config /etc/swift/object-server/1.conf 1 6013 object + local swift_node_config=/etc/swift/object-server/1.conf + local node_id=1 + local bind_port=6013 + local server_type=object + log_facility=0 + node_path=/opt/stack/data/swift/1 + iniuncomment /etc/swift/object-server/1.conf DEFAULT user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT user cloudbase ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT bind_port ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT bind_port 6013 ++ 
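
Collected from the iniset calls above, the keystone-related pieces of /etc/swift/proxy-server.conf for this run end up approximately as follows (values taken from the trace; shown via a heredoc for readability):

cat <<'EOF'
[filter:authtoken]
auth_host = 10.14.0.26
auth_port = 35357
auth_protocol = http
auth_uri = http://10.14.0.26:5000/
admin_tenant_name = service
admin_user = swift
admin_password = Passw0rd
signing_dir = /var/cache/swift
log_name = swift

[filter:keystoneauth]
operator_roles = Member, admin
EOF
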
set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT swift_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT swift_dir /etc/swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT devices ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT devices /opt/stack/data/swift/1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT log_facility ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT log_facility LOG_LOCAL0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT workers ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT workers 1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT disable_fallocate ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT disable_fallocate true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf DEFAULT mount_check ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf DEFAULT mount_check false ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/object-server/1.conf object-replicator vm_test_mode ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf object-replicator vm_test_mode yes ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/object-server/1.conf filter:recon recon_cache_path /opt/stack/data/swift/cache ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sed -i -e 's,#[ ]*recon_cache_path .*,recon_cache_path = /opt/stack/data/swift/cache,' /etc/swift/object-server/1.conf + swift_node_config=/etc/swift/container-server/1.conf + cp /opt/stack/swift/etc/container-server.conf-sample /etc/swift/container-server/1.conf + generate_swift_config /etc/swift/container-server/1.conf 1 6011 container + local swift_node_config=/etc/swift/container-server/1.conf + local node_id=1 + local bind_port=6011 + local server_type=container + log_facility=0 + node_path=/opt/stack/data/swift/1 + iniuncomment /etc/swift/container-server/1.conf DEFAULT user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT user cloudbase ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT bind_port ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT bind_port 6011 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT swift_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT swift_dir /etc/swift ++ set +o ++ grep xtrace + local 
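
The generate_swift_config call above boils down to the same handful of per-node settings; consolidated, /etc/swift/object-server/1.conf carries roughly this DEFAULT section (values from the trace):

cat <<'EOF'
[DEFAULT]
user = cloudbase
bind_port = 6013
swift_dir = /etc/swift
devices = /opt/stack/data/swift/1
log_facility = LOG_LOCAL0
workers = 1
disable_fallocate = true
mount_check = false
EOF
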
'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT devices ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT devices /opt/stack/data/swift/1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT log_facility ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT log_facility LOG_LOCAL0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT workers ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT workers 1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT disable_fallocate ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT disable_fallocate true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf DEFAULT mount_check ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf DEFAULT mount_check false ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf container-replicator vm_test_mode ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf container-replicator vm_test_mode yes ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/container-server/1.conf app:container-server allow_versions ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/container-server/1.conf app:container-server allow_versions true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sed -i -e 's,#[ ]*recon_cache_path .*,recon_cache_path = /opt/stack/data/swift/cache,' /etc/swift/container-server/1.conf + swift_node_config=/etc/swift/account-server/1.conf + cp /opt/stack/swift/etc/account-server.conf-sample /etc/swift/account-server/1.conf + generate_swift_config /etc/swift/account-server/1.conf 1 6012 account + local swift_node_config=/etc/swift/account-server/1.conf + local node_id=1 + local bind_port=6012 + local server_type=account + log_facility=0 + node_path=/opt/stack/data/swift/1 + iniuncomment /etc/swift/account-server/1.conf DEFAULT user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT user cloudbase ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT bind_port ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT bind_port 6012 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT swift_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT swift_dir /etc/swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT devices ++ set +o ++ grep xtrace + local 'xtrace=set -o 
xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT devices /opt/stack/data/swift/1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT log_facility ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT log_facility LOG_LOCAL0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT workers ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT workers 1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT disable_fallocate ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT disable_fallocate true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf DEFAULT mount_check ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf DEFAULT mount_check false ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/swift/account-server/1.conf account-replicator vm_test_mode ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/account-server/1.conf account-replicator vm_test_mode yes ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sed -i -e 's,#[ ]*recon_cache_path .*,recon_cache_path = /opt/stack/data/swift/cache,' /etc/swift/account-server/1.conf + iniset /etc/swift/proxy-server.conf filter:tempauth user_swifttenanttest1_swiftusertest1 'testing .admin' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:tempauth user_swifttenanttest2_swiftusertest2 'testing2 .admin' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/proxy-server.conf filter:tempauth user_swifttenanttest1_swiftusertest3 'testing3 .admin' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + testfile=/etc/swift/test.conf + cp /opt/stack/swift/test/sample.conf /etc/swift/test.conf + iniset /etc/swift/test.conf func_test account swifttenanttest1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test username swiftusertest1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test username3 swiftusertest3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test account2 swifttenanttest2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test username2 swiftusertest2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniuncomment /etc/swift/test.conf func_test auth_version ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/swift/test.conf func_test auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + 
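
The three server types follow one naming and port scheme: /etc/swift/<type>-server/<node>.conf, device root /opt/stack/data/swift/<node>, and bind ports 6013/6011/6012 for object/container/account (apparently a single replica in this run, so only node 1 exists). A quick illustration of the layout just generated:

for spec in object:6013 container:6011 account:6012; do
    srv=${spec%%:*} port=${spec##*:}
    echo "/etc/swift/${srv}-server/1.conf  bind_port=${port}  devices=/opt/stack/data/swift/1"
done
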
iniset /etc/swift/test.conf func_test auth_prefix /v2.0/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + swift_log_dir=/opt/stack/data/swift/logs + rm -rf /opt/stack/data/swift/logs + mkdir -p /opt/stack/data/swift/logs/hourly + sudo chown -R cloudbase:adm /opt/stack/data/swift/logs + sed s,%SWIFT_LOGDIR%,/opt/stack/data/swift/logs, /home/cloudbase/devstack/files/swift/rsyslog.conf + sudo tee /etc/rsyslog.d/10-swift.conf + sudo killall -HUP rsyslogd + is_apache_enabled_service swift + services=swift + for service in '${services}' + [[ ,, =~ ,swift, ]] + return 1 + is_service_enabled swift3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled g-api n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_glance + git_clone git://git.openstack.org/openstack/glance.git /opt/stack/glance stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/glance.git + GIT_DEST=/opt/stack/glance + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! -d /opt/stack/glance ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/glance + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/glance + local project_dir=/opt/stack/glance + setup_package_with_req_sync /opt/stack/glance -e + local project_dir=/opt/stack/glance + local flags=-e ++ cd /opt/stack/glance ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/glance + setup_package /opt/stack/glance -e + local project_dir=/opt/stack/glance + local flags=-e + pip_install -e /opt/stack/glance ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.AKwwk -e /opt/stack/glance + sudo rm -rf /tmp/pip-build.AKwwk + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/glance/glance.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/glance/glance.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/glance/glance.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/glance + git reset --hard + configure_glance + [[ ! 
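
git_clone runs before every project install; in this run each /opt/stack checkout already exists and RECLONE=False, so it only echoes the current commit. A condensed sketch of the branch path (simplified; the real function also special-cases tags and refs/ style references):

function git_clone_sketch {
    local remote=$1 dest=$2 ref=$3
    if [[ ! -d $dest ]]; then
        git clone "$remote" "$dest"
        (cd "$dest" && git checkout "$ref")
    elif [[ ${RECLONE:-False} = True ]]; then
        (cd "$dest" && git fetch "$remote" "$ref" && git checkout "$ref")
    fi
    (cd "$dest" && git show --oneline | head -1)   # the provenance line seen in the trace
}
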
-d /etc/glance ]] + sudo chown cloudbase /etc/glance + cp /opt/stack/glance/etc/glance-registry.conf /etc/glance/glance-registry.conf + iniset /etc/glance/glance-registry.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/glance/glance-registry.conf DEFAULT log_file ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ database_connection_url glance ++ local db=glance ++ database_connection_url_mysql glance ++ local db=glance ++ echo 'mysql://root:Passw0rd@127.0.0.1/glance?charset=utf8' + local 'dburl=mysql://root:Passw0rd@127.0.0.1/glance?charset=utf8' + iniset /etc/glance/glance-registry.conf DEFAULT sql_connection 'mysql://root:Passw0rd@127.0.0.1/glance?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf DEFAULT use_syslog False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf paste_deploy flavor keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + configure_API_version /etc/glance/glance-registry.conf 2.0 + local conf_file=/etc/glance/glance-registry.conf + local api_version=2.0 + iniset /etc/glance/glance-registry.conf keystone_authtoken auth_uri http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken admin_user glance ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-registry.conf keystone_authtoken signing_dir /var/cache/glance/registry ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + cp /opt/stack/glance/etc/glance-api.conf /etc/glance/glance-api.conf + iniset /etc/glance/glance-api.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/glance/glance-api.conf DEFAULT log_file ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT sql_connection 'mysql://root:Passw0rd@127.0.0.1/glance?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT use_syslog False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT filesystem_store_datadir /opt/stack/data/glance/images/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT image_cache_dir 
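
database_connection_url, traced here for glance and earlier for keystone, simply composes a SQLAlchemy URL from the MySQL credentials this run was configured with; roughly:

function database_connection_url_sketch {
    # assumes the MySQL backend and the root/Passw0rd credentials used throughout this log
    local db=$1
    echo "mysql://root:Passw0rd@127.0.0.1/${db}?charset=utf8"
}
database_connection_url_sketch glance   # -> mysql://root:Passw0rd@127.0.0.1/glance?charset=utf8
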
/opt/stack/data/glance/cache/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf paste_deploy flavor keystone+cachemanagement ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + configure_API_version /etc/glance/glance-api.conf 2.0 + local conf_file=/etc/glance/glance-api.conf + local api_version=2.0 + iniset /etc/glance/glance-api.conf keystone_authtoken auth_uri http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken admin_user glance ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n 10.14.0.26 ']' + '[' -n Passw0rd ']' + iniset /etc/glance/glance-api.conf DEFAULT notification_driver messaging ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset_rpc_backend glance /etc/glance/glance-api.conf DEFAULT + local package=glance + local file=/etc/glance/glance-api.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/glance/glance-api.conf DEFAULT rpc_backend glance.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf keystone_authtoken signing_dir /var/cache/glance/api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' libvirt = xenserver ']' + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/glance/glance-api.conf DEFAULT default_store swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT swift_store_auth_address http://10.14.0.26:5000/v2.0/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT swift_store_user service:glance-swift ++ set +o ++ grep xtrace + local 
'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT swift_store_key Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT swift_store_create_container_on_put True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-api.conf DEFAULT known_stores 'glance.store.filesystem.Store, glance.store.http.Store, glance.store.swift.Store' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + cp -p /opt/stack/glance/etc/glance-registry-paste.ini /etc/glance/glance-registry-paste.ini + cp -p /opt/stack/glance/etc/glance-api-paste.ini /etc/glance/glance-api-paste.ini + cp /opt/stack/glance/etc/glance-cache.conf /etc/glance/glance-cache.conf + iniset /etc/glance/glance-cache.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/glance/glance-cache.conf DEFAULT log_file ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT use_syslog False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT filesystem_store_datadir /opt/stack/data/glance/images/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT image_cache_dir /opt/stack/data/glance/cache/ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/glance/glance-cache.conf DEFAULT auth_url ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT auth_url http://10.14.0.26:35357/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/glance/glance-cache.conf DEFAULT auth_tenant_name ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/glance/glance-cache.conf DEFAULT auth_user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT admin_user glance ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniuncomment /etc/glance/glance-cache.conf DEFAULT auth_password ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/glance/glance-cache.conf DEFAULT admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + cp -p /opt/stack/glance/etc/policy.json /etc/glance/policy.json + cp -p /opt/stack/glance/etc/schema-image.json /etc/glance/schema-image.json + is_service_enabled cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_cinder + git_clone git://git.openstack.org/openstack/cinder.git /opt/stack/cinder stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/cinder.git + GIT_DEST=/opt/stack/cinder + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! 
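
setup_develop, which has now run for keystone, swift and glance and is about to run for cinder, condenses to a requirements sync plus an editable pip install; when the checkout carries local changes (as cinder's does below), the sync and the final git reset --hard are skipped. A rough sketch:

function setup_develop_sketch {
    local project_dir=$1 synced=0
    # sync global requirements only when the tree has no local modifications
    if (cd "$project_dir" && git diff --exit-code >/dev/null); then
        (cd /opt/stack/requirements && python update.py "$project_dir")
        synced=1
    fi
    sudo PIP_DOWNLOAD_CACHE=/var/cache/pip pip install -e "$project_dir"
    sudo chown -R "$USER" "$project_dir"/*.egg-info
    # undo the requirements edits again, matching the 'git reset --hard' steps in the trace
    [[ $synced = 1 ]] && (cd "$project_dir" && git reset --hard)
}
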
-d /opt/stack/cinder ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/cinder + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/cinder + local project_dir=/opt/stack/cinder + setup_package_with_req_sync /opt/stack/cinder -e + local project_dir=/opt/stack/cinder + local flags=-e ++ cd /opt/stack/cinder ++ git diff --exit-code ++ echo changed + local update_requirements=changed + [[ changed != \c\h\a\n\g\e\d ]] + setup_package /opt/stack/cinder -e + local project_dir=/opt/stack/cinder + local flags=-e + pip_install -e /opt/stack/cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.xfr3k -e /opt/stack/cinder + sudo rm -rf /tmp/pip-build.xfr3k + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/cinder/cinder.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/cinder/cinder.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/cinder/cinder.egg-info + '[' True = True ']' + [[ changed != \c\h\a\n\g\e\d ]] + configure_cinder + [[ ! -d /etc/cinder ]] + sudo chown cloudbase /etc/cinder + cp -p /opt/stack/cinder/etc/cinder/policy.json /etc/cinder + configure_cinder_rootwrap ++ get_rootwrap_location cinder ++ local module=cinder +++ get_python_exec_prefix +++ is_fedora +++ [[ -z Ubuntu ]] +++ '[' Ubuntu = Fedora ']' +++ '[' Ubuntu = 'Red Hat' ']' +++ '[' Ubuntu = CentOS ']' +++ is_suse +++ [[ -z Ubuntu ]] +++ '[' Ubuntu = openSUSE ']' +++ '[' Ubuntu = 'SUSE LINUX' ']' +++ echo /usr/local/bin ++ echo /usr/local/bin/cinder-rootwrap + CINDER_ROOTWRAP=/usr/local/bin/cinder-rootwrap + [[ -d /etc/cinder/rootwrap.d ]] + sudo rm -rf /etc/cinder/rootwrap.d + sudo mkdir -m 755 /etc/cinder/rootwrap.d + sudo cp /opt/stack/cinder/etc/cinder/rootwrap.d/volume.filters /etc/cinder/rootwrap.d + sudo chown -R root:root /etc/cinder/rootwrap.d + sudo chmod 644 /etc/cinder/rootwrap.d/volume.filters + sudo cp /opt/stack/cinder/etc/cinder/rootwrap.conf /etc/cinder/ + sudo sed -e 's:^filters_path=.*$:filters_path=/etc/cinder/rootwrap.d:' -i /etc/cinder/rootwrap.conf + sudo chown root:root /etc/cinder/rootwrap.conf + sudo chmod 0644 /etc/cinder/rootwrap.conf + ROOTWRAP_CSUDOER_CMD='/usr/local/bin/cinder-rootwrap /etc/cinder/rootwrap.conf *' ++ mktemp + TEMPFILE=/tmp/tmp.Zam9rmsPRh + echo 'cloudbase ALL=(root) NOPASSWD: /usr/local/bin/cinder-rootwrap /etc/cinder/rootwrap.conf *' + chmod 0440 /tmp/tmp.Zam9rmsPRh + sudo chown root:root /tmp/tmp.Zam9rmsPRh + sudo mv /tmp/tmp.Zam9rmsPRh /etc/sudoers.d/cinder-rootwrap + cp /opt/stack/cinder/etc/cinder/api-paste.ini /etc/cinder/api-paste.ini + inicomment /etc/cinder/api-paste.ini filter:authtoken auth_host ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken auth_port ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken auth_protocol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken admin_tenant_name ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken admin_user ++ set +o ++ grep xtrace + 
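
The sudoers drop-in written above is the whole privilege boundary for the cinder services: the cloudbase user may run exactly one command as root, and rootwrap then only executes what /etc/cinder/rootwrap.d/volume.filters allows. To inspect the rule and see the call shape (the tgtadm example is illustrative, not taken from this trace):

sudo cat /etc/sudoers.d/cinder-rootwrap
# cloudbase ALL=(root) NOPASSWD: /usr/local/bin/cinder-rootwrap /etc/cinder/rootwrap.conf *
# a privileged call from cinder-volume is then shaped like:
#   sudo /usr/local/bin/cinder-rootwrap /etc/cinder/rootwrap.conf tgtadm --op show --mode target
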
local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken admin_password ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/cinder/api-paste.ini filter:authtoken signing_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + cp /opt/stack/cinder/etc/cinder/cinder.conf.sample /etc/cinder/cinder.conf + iniset /etc/cinder/cinder.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_user cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken signing_dir /var/cache/cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT auth_strategy keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False = True ']' + iniset /etc/cinder/cinder.conf DEFAULT volume_group stack-volumes ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT volume_name_template volume-%s ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT my_ip 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT iscsi_helper tgtadm ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ database_connection_url cinder ++ local db=cinder ++ database_connection_url_mysql cinder ++ local db=cinder ++ echo 'mysql://root:Passw0rd@127.0.0.1/cinder?charset=utf8' + iniset /etc/cinder/cinder.conf DEFAULT sql_connection 'mysql://root:Passw0rd@127.0.0.1/cinder?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT api_paste_config /etc/cinder/api-paste.ini ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT rootwrap_config /etc/cinder/rootwrap.conf ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT osapi_volume_extension cinder.api.contrib.standard_extensions ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT state_path /opt/stack/data/cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT lock_path 
/opt/stack/data/cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT periodic_interval 60 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/cinder/cinder.conf DEFAULT backup_swift_url http://10.14.0.26:8080/v1/AUTH_ ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/cinder/cinder.conf DEFAULT notification_driver cinder.openstack.common.notifier.rpc_notifier ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' False '!=' False ']' + iniset_rpc_backend cinder /etc/cinder/cinder.conf DEFAULT + local package=cinder + local file=/etc/cinder/cinder.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/cinder/cinder.conf DEFAULT rpc_backend cinder.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ False == \F\a\l\s\e ]] + iniset /etc/cinder/cinder.conf DEFAULT secure_delete False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf DEFAULT volume_clear none ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False == True ']' + [[ -r /home/cloudbase/devstack/lib/cinder_plugins/default ]] + [[ -n is_fedora ]] + [[ precise =~ (rhel6) ]] + configure_API_version /etc/cinder/cinder.conf 2.0 + local conf_file=/etc/cinder/cinder.conf + local api_version=2.0 + iniset /etc/cinder/cinder.conf keystone_authtoken auth_uri http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_user cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/cinder/cinder.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_neutron + git_clone git://git.openstack.org/openstack/neutron.git /opt/stack/neutron stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/neutron.git + GIT_DEST=/opt/stack/neutron + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo 
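
Pulling the iniset calls above together, the storage-relevant core of /etc/cinder/cinder.conf for this run comes out approximately as (values from the trace):

cat <<'EOF'
[DEFAULT]
auth_strategy = keystone
volume_group = stack-volumes
volume_name_template = volume-%s
iscsi_helper = tgtadm
my_ip = 10.14.0.26
sql_connection = mysql://root:Passw0rd@127.0.0.1/cinder?charset=utf8
rootwrap_config = /etc/cinder/rootwrap.conf
state_path = /opt/stack/data/cinder
backup_swift_url = http://10.14.0.26:8080/v1/AUTH_
rabbit_hosts = 10.14.0.26
rabbit_password = Passw0rd
volume_clear = none
EOF
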
stable/icehouse + egrep -q '^refs' + [[ ! -d /opt/stack/neutron ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/neutron + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/neutron + local project_dir=/opt/stack/neutron + setup_package_with_req_sync /opt/stack/neutron -e + local project_dir=/opt/stack/neutron + local flags=-e ++ cd /opt/stack/neutron ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/neutron + setup_package /opt/stack/neutron -e + local project_dir=/opt/stack/neutron + local flags=-e + pip_install -e /opt/stack/neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.vUn4G -e /opt/stack/neutron + sudo rm -rf /tmp/pip-build.vUn4G + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/neutron/neutron.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/neutron/neutron.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/neutron/neutron.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/neutron + git reset --hard + install_neutron_third_party + _neutron_third_party_do install + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_nova + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled n-novnc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled n-spice ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + git_clone git://git.openstack.org/openstack/nova.git /opt/stack/nova stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/nova.git + GIT_DEST=/opt/stack/nova + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! 
-d /opt/stack/nova ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/nova + git show --oneline + head -1 + cd /home/cloudbase/devstack + setup_develop /opt/stack/nova + local project_dir=/opt/stack/nova + setup_package_with_req_sync /opt/stack/nova -e + local project_dir=/opt/stack/nova + local flags=-e ++ cd /opt/stack/nova ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/nova + setup_package /opt/stack/nova -e + local project_dir=/opt/stack/nova + local flags=-e + pip_install -e /opt/stack/nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.eOnSy -e /opt/stack/nova + sudo rm -rf /tmp/pip-build.eOnSy + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/nova/nova.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/nova/nova.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/nova/nova.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/nova + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/nova/tools/nova-manage.bash_completion /etc/bash_completion.d/nova-manage.bash_completion + cleanup_nova + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + sudo rm -rf /opt/stack/data/nova /var/cache/nova + configure_nova + [[ ! -d /etc/nova ]] + sudo chown cloudbase /etc/nova + cp -p /opt/stack/nova/etc/nova/policy.json /etc/nova + configure_nova_rootwrap + [[ -d /etc/nova/rootwrap.d ]] + sudo rm -rf /etc/nova/rootwrap.d + sudo mkdir -m 755 /etc/nova/rootwrap.d + sudo cp /opt/stack/nova/etc/nova/rootwrap.d/api-metadata.filters /opt/stack/nova/etc/nova/rootwrap.d/baremetal-compute-ipmi.filters /opt/stack/nova/etc/nova/rootwrap.d/baremetal-deploy-helper.filters /opt/stack/nova/etc/nova/rootwrap.d/compute.filters /opt/stack/nova/etc/nova/rootwrap.d/network.filters /etc/nova/rootwrap.d + sudo chown -R root:root /etc/nova/rootwrap.d + sudo chmod 644 /etc/nova/rootwrap.d/api-metadata.filters /etc/nova/rootwrap.d/baremetal-compute-ipmi.filters /etc/nova/rootwrap.d/baremetal-deploy-helper.filters /etc/nova/rootwrap.d/compute.filters /etc/nova/rootwrap.d/network.filters + sudo cp /opt/stack/nova/etc/nova/rootwrap.conf /etc/nova/ + sudo sed -e 's:^filters_path=.*$:filters_path=/etc/nova/rootwrap.d:' -i /etc/nova/rootwrap.conf + sudo chown root:root /etc/nova/rootwrap.conf + sudo chmod 0644 /etc/nova/rootwrap.conf + ROOTWRAP_SUDOER_CMD='/usr/local/bin/nova-rootwrap /etc/nova/rootwrap.conf *' ++ mktemp + TEMPFILE=/tmp/tmp.Wdew5vhDxU + echo 'cloudbase ALL=(root) NOPASSWD: /usr/local/bin/nova-rootwrap /etc/nova/rootwrap.conf *' + chmod 0440 /tmp/tmp.Wdew5vhDxU + sudo chown root:root /tmp/tmp.Wdew5vhDxU + sudo mv /tmp/tmp.Wdew5vhDxU /etc/sudoers.d/nova-rootwrap + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + rm -f /opt/stack/nova/bin/nova-api-paste.ini + cp /opt/stack/nova/etc/nova/api-paste.ini /etc/nova + inicomment /etc/nova/api-paste.ini filter:authtoken auth_host ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken auth_protocol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken 
admin_tenant_name ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken admin_user ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken admin_password ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + inicomment /etc/nova/api-paste.ini filter:authtoken signing_dir ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + create_nova_conf + rm -f /opt/stack/nova/bin/nova.conf + rm -f /etc/nova/nova.conf + iniset /etc/nova/nova.conf DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT auth_strategy keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT allow_resize_to_same_host True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT api_paste_config /etc/nova/api-paste.ini ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT rootwrap_config /etc/nova/rootwrap.conf ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT scheduler_driver nova.scheduler.filter_scheduler.FilterScheduler ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT dhcpbridge_flagfile /etc/nova/nova.conf ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT force_dhcp_release True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT fixed_range '' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT default_floating_pool public ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT s3_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT s3_port 3333 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT osapi_compute_extension nova.api.openstack.compute.contrib.standard_extensions ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT my_ip 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT osapi_compute_workers 4 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT ec2_workers 4 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT metadata_workers 4 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf conductor workers 4 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ database_connection_url nova ++ local db=nova ++ database_connection_url_mysql nova ++ local db=nova ++ echo 
'mysql://root:Passw0rd@127.0.0.1/nova?charset=utf8' + iniset /etc/nova/nova.conf DEFAULT sql_connection 'mysql://root:Passw0rd@127.0.0.1/nova?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT instance_name_template instance-%08x ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf osapi_v3 enabled True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + is_suse + [[ -z Ubuntu ]] + '[' Ubuntu = openSUSE ']' + '[' Ubuntu = 'SUSE LINUX' ']' + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled n-api-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + iniset /etc/nova/nova.conf DEFAULT enabled_apis ec2,osapi_compute,metadata ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + iniset /etc/nova/nova.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken admin_user nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf keystone_authtoken signing_dir /var/cache/nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/nova/nova.conf DEFAULT volume_api_class nova.volume.cinder.API ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' -n /opt/stack/data/nova ']' + iniset /etc/nova/nova.conf DEFAULT state_path /opt/stack/data/nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT lock_path /opt/stack/data/nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' -n /opt/stack/data/nova/instances ']' + iniset /etc/nova/nova.conf DEFAULT instances_path /opt/stack/data/nova/instances ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False '!=' False ']' + '[' False '!=' False ']' + '[' False '!=' True ']' + iniset /etc/nova/nova.conf DEFAULT api_rate_limit False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' always '!=' False ']' + iniset /etc/nova/nova.conf DEFAULT force_config_drive always ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False == True ']' + iniset /etc/nova/nova.conf DEFAULT logging_context_format_string 
'%(asctime)s.%(msecs)03d %(levelname)s %(name)s [%(request_id)s %(user_name)s %(project_name)s] %(instance)s%(message)s' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/nova/nova.conf DEFAULT instance_usage_audit True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT instance_usage_audit_period hour ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT notify_on_state_change vm_and_task_state ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT notification_driver messaging ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ -z '' ]] + [[ -n '' ]] + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled n-novnc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled n-xvnc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + VNCSERVER_LISTEN=127.0.0.1 + VNCSERVER_PROXYCLIENT_ADDRESS=127.0.0.1 + iniset /etc/nova/nova.conf DEFAULT vnc_enabled true ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT vncserver_listen 127.0.0.1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT vncserver_proxyclient_address 127.0.0.1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled n-spice ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + iniset /etc/nova/nova.conf spice enabled false ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT ec2_dmz_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset_rpc_backend nova /etc/nova/nova.conf DEFAULT + local package=nova + local file=/etc/nova/nova.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/nova/nova.conf DEFAULT rpc_backend nova.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT glance_api_servers 10.14.0.26:9292 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled horizon ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_ceilometerclient + git_clone 
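
iniset_rpc_backend has now been traced for glance, cinder and nova (ceilometer follows below); with only rabbit enabled, zeromq and qpid are checked and skipped each time, and the call reduces to three settings pointing at the package's bundled kombu driver. Assuming devstack's inifile helpers are sourced:

function iniset_rpc_backend_sketch {
    local package=$1 file=$2 section=$3
    iniset "$file" "$section" rpc_backend "${package}.openstack.common.rpc.impl_kombu"
    iniset "$file" "$section" rabbit_hosts 10.14.0.26
    iniset "$file" "$section" rabbit_password Passw0rd
}
# e.g. iniset_rpc_backend_sketch nova /etc/nova/nova.conf DEFAULT
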
git://git.openstack.org/openstack/python-ceilometerclient.git /opt/stack/python-ceilometerclient master + GIT_REMOTE=git://git.openstack.org/openstack/python-ceilometerclient.git + GIT_DEST=/opt/stack/python-ceilometerclient + GIT_REF=master ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo master + egrep -q '^refs' + [[ ! -d /opt/stack/python-ceilometerclient ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/python-ceilometerclient + git show --oneline + head -1 + cd /home/cloudbase/devstack + install_ceilometer + git_clone git://git.openstack.org/openstack/ceilometer.git /opt/stack/ceilometer stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/ceilometer.git + GIT_DEST=/opt/stack/ceilometer + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! -d /opt/stack/ceilometer ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/ceilometer + git show --oneline + head -1 + cd /home/cloudbase/devstack + echo_summary 'Configuring Ceilometer' + [[ -t 3 ]] + echo -e Configuring Ceilometer + configure_ceilometer + setup_develop /opt/stack/ceilometer + local project_dir=/opt/stack/ceilometer + setup_package_with_req_sync /opt/stack/ceilometer -e + local project_dir=/opt/stack/ceilometer + local flags=-e ++ cd /opt/stack/ceilometer ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/ceilometer + setup_package /opt/stack/ceilometer -e + local project_dir=/opt/stack/ceilometer + local flags=-e + pip_install -e /opt/stack/ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.QM0JS -e /opt/stack/ceilometer + sudo rm -rf /tmp/pip-build.QM0JS + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/ceilometer/ceilometer.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/ceilometer/ceilometer.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/ceilometer/ceilometer.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/ceilometer + git reset --hard + '[' '!' -d /etc/ceilometer ']' + sudo chown cloudbase /etc/ceilometer + '[' '!' 
-d /var/log/ceilometer-api ']' + sudo chown cloudbase /var/log/ceilometer-api + iniset_rpc_backend ceilometer /etc/ceilometer/ceilometer.conf DEFAULT + local package=ceilometer + local file=/etc/ceilometer/ceilometer.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/ceilometer/ceilometer.conf DEFAULT rpc_backend ceilometer.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT notification_topics notifications ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + cp /opt/stack/ceilometer/etc/ceilometer/policy.json /etc/ceilometer + cp /opt/stack/ceilometer/etc/ceilometer/pipeline.yaml /etc/ceilometer + iniset /etc/ceilometer/ceilometer.conf DEFAULT policy_file /etc/ceilometer/policy.json ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' '' ']' + iniset /etc/ceilometer/ceilometer.conf DEFAULT os_username ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT os_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf DEFAULT os_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken admin_user ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/ceilometer/ceilometer.conf keystone_authtoken signing_dir /var/cache/ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' mongodb = mysql ']' + '[' mongodb = postgresql ']' + iniset /etc/ceilometer/ceilometer.conf database connection mongodb://localhost:27017/ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + 
configure_mongodb + local packages=mongodb-server + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + packages='mongodb-server python-pymongo' + install_package mongodb-server python-pymongo ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install mongodb-server python-pymongo ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install mongodb-server python-pymongo + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + sleep 5 + cleanup_ceilometer + '[' mongodb '!=' mysql ']' + '[' mongodb '!=' postgresql ']' + mongo ceilometer --eval 'db.dropDatabase();' + [[ libvirt = \v\s\p\h\e\r\e ]] + configure_ceilometerclient + setup_develop /opt/stack/python-ceilometerclient + local project_dir=/opt/stack/python-ceilometerclient + setup_package_with_req_sync /opt/stack/python-ceilometerclient -e + local project_dir=/opt/stack/python-ceilometerclient + local flags=-e ++ cd /opt/stack/python-ceilometerclient ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/python-ceilometerclient + setup_package /opt/stack/python-ceilometerclient -e + local project_dir=/opt/stack/python-ceilometerclient + local flags=-e + pip_install -e /opt/stack/python-ceilometerclient ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.JGzRc -e /opt/stack/python-ceilometerclient + sudo rm -rf /tmp/pip-build.JGzRc + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/python-ceilometerclient/python_ceilometerclient.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/python-ceilometerclient/python_ceilometerclient.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/python-ceilometerclient/python_ceilometerclient.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/python-ceilometerclient + git reset --hard + sudo install -D -m 0644 -o cloudbase /opt/stack/python-ceilometerclient/tools/ceilometer.bash_completion /etc/bash_completion.d/ceilometer.bash_completion + is_service_enabled heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + install_heat + git_clone git://git.openstack.org/openstack/heat.git /opt/stack/heat stable/icehouse + GIT_REMOTE=git://git.openstack.org/openstack/heat.git + GIT_DEST=/opt/stack/heat + GIT_REF=stable/icehouse ++ trueorfalse False False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + RECLONE=False ++ pwd + local orig_dir=/home/cloudbase/devstack + [[ False = \T\r\u\e ]] + echo stable/icehouse + egrep -q '^refs' + [[ ! 
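
By this point ceilometer.conf has been pointed at the same RabbitMQ broker for RPC and at a local MongoDB instance for the metering store, and configure_mongodb installs mongodb-server plus python-pymongo and drops any stale ceilometer database before first use. A small check of that wiring, assuming the mongo shell and crudini are available (neither check appears in the trace itself):

# read back the metering backend configured above; expected value: mongodb://localhost:27017/ceilometer
crudini --get /etc/ceilometer/ceilometer.conf database connection
# confirm the local MongoDB answers; the collection list stays empty until samples arrive
mongo --quiet ceilometer --eval 'printjson(db.getCollectionNames())'
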
-d /opt/stack/heat ]] + [[ False = \T\r\u\e ]] + cd /opt/stack/heat + git show --oneline + head -1 + cd /home/cloudbase/devstack + cleanup_heat + sudo rm -rf /var/cache/heat + sudo rm -rf /etc/heat/environment.d + sudo rm -rf /etc/heat/templates + configure_heat + setup_develop /opt/stack/heat + local project_dir=/opt/stack/heat + setup_package_with_req_sync /opt/stack/heat -e + local project_dir=/opt/stack/heat + local flags=-e ++ cd /opt/stack/heat ++ git diff --exit-code + local update_requirements= + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/requirements + python update.py /opt/stack/heat + setup_package /opt/stack/heat -e + local project_dir=/opt/stack/heat + local flags=-e + pip_install -e /opt/stack/heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.lkSiu -e /opt/stack/heat + sudo rm -rf /tmp/pip-build.lkSiu + [[ -e == \-\e ]] + safe_chown -R cloudbase /opt/stack/heat/heat.egg-info + _safe_permission_operation chown -R cloudbase /opt/stack/heat/heat.egg-info ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo chown -R cloudbase /opt/stack/heat/heat.egg-info + '[' True = True ']' + [[ '' != \c\h\a\n\g\e\d ]] + cd /opt/stack/heat + git reset --hard + [[ ! -d /etc/heat ]] + sudo chown cloudbase /etc/heat + rm -f '/etc/heat/heat-*.conf' + HEAT_API_CFN_HOST=10.14.0.26 + HEAT_API_CFN_PORT=8000 + HEAT_ENGINE_HOST=10.14.0.26 + HEAT_ENGINE_PORT=8001 + HEAT_API_CW_HOST=10.14.0.26 + HEAT_API_CW_PORT=8003 + HEAT_API_HOST=10.14.0.26 + HEAT_API_PORT=8004 + HEAT_API_PASTE_FILE=/etc/heat/api-paste.ini + HEAT_POLICY_FILE=/etc/heat/policy.json + cp /opt/stack/heat/etc/heat/api-paste.ini /etc/heat/api-paste.ini + cp /opt/stack/heat/etc/heat/policy.json /etc/heat/policy.json + cp /opt/stack/heat/etc/heat/heat.conf.sample /etc/heat/heat.conf + iniset_rpc_backend heat /etc/heat/heat.conf DEFAULT + local package=heat + local file=/etc/heat/heat.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/heat/heat.conf DEFAULT rpc_backend heat.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT heat_metadata_server_url http://10.14.0.26:8000 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT heat_waitcondition_server_url http://10.14.0.26:8000/v1/waitcondition ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT heat_watch_server_url http://10.14.0.26:8003 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ database_connection_url heat ++ local db=heat ++ database_connection_url_mysql heat ++ local db=heat ++ echo 'mysql://root:Passw0rd@127.0.0.1/heat?charset=utf8' + iniset /etc/heat/heat.conf database connection 
'mysql://root:Passw0rd@127.0.0.1/heat?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ hexdump -n 16 -v -e '/1 "%02x"' /dev/urandom + iniset /etc/heat/heat.conf DEFAULT auth_encryption_key 95e8a68017138735b898fcd2fea3a13e ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT use_syslog False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + '[' False == True ']' + iniset /etc/heat/heat.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + configure_API_version /etc/heat/heat.conf 2.0 + local conf_file=/etc/heat/heat.conf + local api_version=2.0 + iniset /etc/heat/heat.conf keystone_authtoken auth_uri http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken cafile ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken admin_user heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf keystone_authtoken signing_dir /var/cache/heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf ec2authtoken auth_uri http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ False = \T\r\u\e ]] + iniset /etc/heat/heat.conf heat_api bind_port 8004 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf heat_api_cfn bind_port 8000 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf heat_api_cloudwatch bind_port 8003 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo mkdir -p /etc/heat/environment.d + sudo chown cloudbase /etc/heat/environment.d + cp /opt/stack/heat/etc/heat/environment.d/default.yaml /etc/heat/environment.d/ + sudo mkdir -p /etc/heat/templates + sudo chown cloudbase /etc/heat/templates + cp /opt/stack/heat/etc/heat/templates/AWS_CloudWatch_Alarm.yaml /opt/stack/heat/etc/heat/templates/AWS_RDS_DBInstance.yaml /etc/heat/templates/ + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + [[ -d /home/cloudbase/devstack/extras.d ]] + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/50-ironic.sh ]] + source /home/cloudbase/devstack/extras.d/50-ironic.sh stack install ++ is_service_enabled ir-api ir-cond +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-gantt.sh ]] + source /home/cloudbase/devstack/extras.d/70-gantt.sh stack 
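
heat.conf now carries the RabbitMQ credentials, the CFN/waitcondition/watch URLs on ports 8000 and 8003, the MySQL connection string, the keystone_authtoken block, the per-API bind ports (8004/8000/8003) and a random auth_encryption_key read from /dev/urandom. The key generation can be reproduced in isolation like this (crudini again used as a stand-in for iniset, an assumption as before):

# 16 random bytes rendered as 32 hex characters, exactly as in the trace
key=$(hexdump -n 16 -v -e '/1 "%02x"' /dev/urandom)
echo "$key"
sudo crudini --set /etc/heat/heat.conf DEFAULT auth_encryption_key "$key"

Heat uses this key to encrypt credentials it stores in its database, so it should be set once before the first heat-engine start and then kept stable.
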
install ++ is_service_enabled n-sch +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ disable_service gantt ++ local tmpsvcs=,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, ++ local service ++ for service in '$@' ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 +++ _cleanup_service_list ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ echo ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ sed -e ' s/,,/,/g; s/^,//; s/,$// ' ++ ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-marconi.sh ]] + source /home/cloudbase/devstack/extras.d/70-marconi.sh stack install ++ is_service_enabled marconi-server +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-sahara.sh ]] + source /home/cloudbase/devstack/extras.d/70-sahara.sh stack install ++ is_service_enabled sahara +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-trove.sh ]] + source /home/cloudbase/devstack/extras.d/70-trove.sh stack install ++ is_service_enabled trove +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-opendaylight.sh ]] + source /home/cloudbase/devstack/extras.d/80-opendaylight.sh stack install ++ is_service_enabled odl-server odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-server +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-tempest.sh ]] + source /home/cloudbase/devstack/extras.d/80-tempest.sh stack install ++ is_service_enabled tempest +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ [[ stack == \s\o\u\r\c\e ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ install == 
\i\n\s\t\a\l\l ]] ++ echo_summary 'Installing Tempest' ++ [[ -t 3 ]] ++ echo -e Installing Tempest ++ install_tempest ++ git_clone git://git.openstack.org/openstack/tempest.git /opt/stack/tempest master ++ GIT_REMOTE=git://git.openstack.org/openstack/tempest.git ++ GIT_DEST=/opt/stack/tempest ++ GIT_REF=master +++ trueorfalse False False ++++ set +o ++++ grep xtrace +++ local 'xtrace=set -o xtrace' +++ set +o xtrace ++ RECLONE=False +++ pwd ++ local orig_dir=/home/cloudbase/devstack ++ [[ False = \T\r\u\e ]] ++ echo master ++ egrep -q '^refs' ++ [[ ! -d /opt/stack/tempest ]] ++ [[ False = \T\r\u\e ]] ++ cd /opt/stack/tempest ++ git show --oneline ++ head -1 ++ cd /home/cloudbase/devstack ++ pip_install 'tox<1.7' +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.PGddR 'tox<1.7' ++ sudo rm -rf /tmp/pip-build.PGddR ++ [[ stack == \u\n\s\t\a\c\k ]] ++ [[ stack == \c\l\e\a\n ]] + [[ False = True ]] + [[ False != \F\a\l\s\e ]] + restart_rpc_backend + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting RabbitMQ' + [[ -t 3 ]] + echo -e Starting RabbitMQ ++ seq 10 + for i in '`seq 10`' + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + is_suse + [[ -z Ubuntu ]] + '[' Ubuntu = openSUSE ']' + '[' Ubuntu = 'SUSE LINUX' ']' + sudo rabbitmqctl change_password guest Passw0rd ls: cannot access /etc/rabbitmq/rabbitmq.conf.d: No such file or directory + break + is_service_enabled n-cell ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -f /opt/stack/data/ca-bundle.pem ']' + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + configure_database + configure_database_mysql + echo_summary 'Configuring and starting MySQL' + [[ -t 3 ]] + echo -e Configuring and starting MySQL + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + MY_CONF=/etc/mysql/my.cnf + MYSQL=mysql + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + is_suse + [[ -z Ubuntu ]] + '[' Ubuntu = openSUSE ']' + '[' Ubuntu = 'SUSE LINUX' ']' + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo mysql -uroot -pPassw0rd -h127.0.0.1 -e 'GRANT ALL PRIVILEGES ON *.* TO '\''root'\''@'\''%'\'' identified by '\''Passw0rd'\'';' + sudo bash -c 'source /home/cloudbase/devstack/functions && iniset /etc/mysql/my.cnf mysqld bind-address 0.0.0.0 && iniset /etc/mysql/my.cnf mysqld default-storage-engine InnoDB' + [[ True == \T\r\u\e ]] + echo_summary 'Enabling MySQL query logging' + [[ -t 3 ]] + echo -e Enabling MySQL query logging + sudo sed -e /log.slow.queries/d -e /long.query.time/d -e /log.queries.not.using.indexes/d -i /etc/mysql/my.cnf + sudo bash -c 'source /home/cloudbase/devstack/functions && iniset /etc/mysql/my.cnf mysqld slow-query-log 1 && iniset /etc/mysql/my.cnf mysqld slow-query-log-file /var/log/mysql/mysql-slow.log && iniset /etc/mysql/my.cnf mysqld long-query-time 0 && iniset /etc/mysql/my.cnf mysqld log-queries-not-using-indexes 1' + restart_service mysql + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo /usr/sbin/service mysql restart ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + [[ True == \T\r\u\e ]] + screen -d 
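
After the Tempest checkout the trace switches from installing code to starting backing services: the RabbitMQ guest password is reset, and MySQL is reconfigured to listen on 0.0.0.0, default to InnoDB and log slow queries before being restarted. The 'ls: cannot access /etc/rabbitmq/rabbitmq.conf.d' message appears to come from rabbitmqctl's wrapper probing for an optional config directory and is harmless here. The MySQL knobs can be verified afterwards with the same credentials used in the trace:

# verify the slow-query logging and storage-engine settings just written to /etc/mysql/my.cnf
mysql -uroot -pPassw0rd -h127.0.0.1 -e "SHOW VARIABLES LIKE 'slow_query%'; SHOW VARIABLES LIKE 'long_query_time'; SHOW VARIABLES LIKE 'default_storage_engine';"
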
-m -S stack -t shell -s /bin/bash + sleep 1 + '[' -z '' ']' + SCREEN_HARDSTATUS='%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})' + screen -r stack -X hardstatus alwayslastline '%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})' + screen -r stack -X setenv PROMPT_COMMAND /bin/true + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ -e /home/cloudbase/devstack/stack-screenrc ]] + rm -f /home/cloudbase/devstack/stack-screenrc + init_service_check + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status + [[ ! -d /opt/stack/status/stack ]] + rm -f /opt/stack/status/stack/ceilometer-api.failure /opt/stack/status/stack/h-api-cfn.failure /opt/stack/status/stack/h-api-cw.failure /opt/stack/status/stack/h-api.failure + DSTAT_OPTS='-tcmndrylp --top-cpu-adv' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen_it dstat 'cd /home/cloudbase/devstack; dstat -tcmndrylp --top-cpu-adv | tee /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/dstat.txt' + is_service_enabled dstat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Keystone' + [[ -t 3 ]] + echo -e Starting Keystone + init_keystone + is_service_enabled ldap ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + recreate_database keystone utf8 + local db=keystone + local charset=utf8 + recreate_database_mysql keystone utf8 + local db=keystone + local charset=utf8 + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'DROP DATABASE IF EXISTS keystone;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE keystone CHARACTER SET utf8;' + /opt/stack/keystone/bin/keystone-manage db_sync + [[ PKI == \P\K\I ]] + rm -rf /etc/keystone/ssl + /opt/stack/keystone/bin/keystone-manage pki_setup Generating RSA private key, 2048 bit long modulus .............+++ ....+++ e is 65537 (0x10001) Generating RSA private key, 2048 bit long modulus .................................................+++ ..............................................................................................................+++ e is 65537 (0x10001) Using configuration from /etc/keystone/ssl/certs/openssl.conf Check that the request matches the signature Signature ok The Subject's Distinguished Name is as follows countryName :PRINTABLE:'US' stateOrProvinceName :ASN.1 12:'Unset' localityName :ASN.1 12:'Unset' organizationName :ASN.1 12:'Unset' commonName :ASN.1 12:'www.example.com' Certificate is to be certified until Oct 8 23:35:49 2024 GMT (3650 days) Write out database with 1 new entries Data Base Updated + sudo mkdir -p /var/cache/keystone + sudo chown cloudbase /var/cache/keystone + rm -f '/var/cache/keystone/*' + start_keystone + local service_port=5000 + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_apache_enabled_service key + services=key + for service in '${services}' + [[ ,, =~ ,key, ]] + return 1 + run_process key '/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug' + local service=key + local 'command=/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug' + local group= + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e 
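
init_keystone drops and recreates the keystone database, runs keystone-manage db_sync, and then keystone-manage pki_setup, which is what produces the interleaved OpenSSL output: a CA plus a signing certificate are generated so Keystone can issue PKI tokens, valid for ten years according to the trace. Assuming the default pki_setup paths were not overridden (only /etc/keystone/ssl/certs/openssl.conf is visible in the trace, so the certificate path below is an assumption), the signing certificate can be inspected with:

# default signing certificate location used by keystone-manage pki_setup (assumed, not shown in the trace)
openssl x509 -in /etc/keystone/ssl/certs/signing_cert.pem -noout -subject -dates
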
]] + screen_service key '/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug' '' + local service=key + local 'command=/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc key '/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + echo 'sessionname stack' + echo 'hardstatus alwayslastline '\''%{= .} %-Lw%{= .}%> %n%f %t*%{= .}%+Lw%< %-=%{g}(%{d}%H/%l%{g})'\''' + echo 'setenv PROMPT_COMMAND /bin/true' + echo 'screen -t shell bash' + grep key /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t key bash' + echo 'stuff "/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-key.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t key + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p key -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-key.2014-10-12-023444.log + screen -S stack -p key -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-key.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-key.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p key -X stuff '/opt/stack/keystone/bin/keystone-all --config-file /etc/keystone/keystone.conf --debug & echo $! >/opt/stack/status/stack/key.pid; fg || echo "key failed to start" | tee "/opt/stack/status/stack/key.failure" ' + echo 'Waiting for keystone to start...' + timeout 60 sh -c 'while ! 
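
run_process hands keystone-all to screen_service, which adds a window named key to the long-running stack screen session, turns on per-window logging under the report directory, and types the command into the window with stuff so that its PID lands in /opt/stack/status/stack/key.pid and a key.failure marker is written if it ever exits. The same window-per-service pattern, stripped down and with made-up names (demo-svc, the paths and the HTTP-server command are placeholders, not from the trace):

# assumes a detached session already exists: screen -d -m -S stack -t shell -s /bin/bash
SESSION=stack
SVC=demo-svc
CMD='python -m SimpleHTTPServer 8099'
LOGDIR=/tmp/demo-logs; STATUSDIR=/tmp/demo-status
mkdir -p "$LOGDIR" "$STATUSDIR"
NL=$'\r'
screen -S "$SESSION" -X screen -t "$SVC"                             # new window for the service
screen -S "$SESSION" -p "$SVC" -X logfile "$LOGDIR/screen-$SVC.log"
screen -S "$SESSION" -p "$SVC" -X log on                             # capture everything the window prints
screen -S "$SESSION" -p "$SVC" -X stuff \
  "$CMD & echo \$! >$STATUSDIR/$SVC.pid; fg || echo \"$SVC failed to start\" | tee \"$STATUSDIR/$SVC.failure\"$NL"

Because the command is merely typed into an interactive bash window, the service keeps running after stack.sh finishes and can be reached later by reattaching with screen -r stack.
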
curl --noproxy '\''*'\'' -k -s http://10.14.0.26:5000/v2.0/ >/dev/null; do sleep 1; done' + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + SERVICE_ENDPOINT=http://10.14.0.26:35357/v2.0 + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + export OS_TOKEN=Passw0rd + OS_TOKEN=Passw0rd + export OS_URL=http://10.14.0.26:35357/v2.0 + OS_URL=http://10.14.0.26:35357/v2.0 + create_keystone_accounts ++ openstack project create admin ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 730ecfe3234240fe82078df7808414fb |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + ADMIN_TENANT=730ecfe3234240fe82078df7808414fb ++ openstack user create admin --project 730ecfe3234240fe82078df7808414fb --email admin@example.com --password Passw0rd ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 868279de31974cb4b30d0e34baca515f |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + ADMIN_USER=868279de31974cb4b30d0e34baca515f ++ openstack role create admin ++ get_field 2 ++ read data ++ grep ' id ' ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 2595a17c0e024b2986cc7011b8113453 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project 730ecfe3234240fe82078df7808414fb --user 868279de31974cb4b30d0e34baca515f + openstack project create service + openstack role create service + openstack role create ResellerAdmin ++ openstack role create Member ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 375c267cfa8740d6b3139b6d933274cb |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + MEMBER_ROLE=375c267cfa8740d6b3139b6d933274cb ++ openstack role create anotherrole ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 519808ccaf8b4142ae31dbf216eb8dac |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + ANOTHER_ROLE=519808ccaf8b4142ae31dbf216eb8dac ++ openstack project create invisible_to_admin ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 3f496858f9b9412f8f3d1fe52c4640a2 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + INVIS_TENANT=3f496858f9b9412f8f3d1fe52c4640a2 ++ openstack project create demo ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 70fae7298c364df6bd617e37e0df13e8 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + DEMO_TENANT=70fae7298c364df6bd617e37e0df13e8 ++ openstack user create demo --project 70fae7298c364df6bd617e37e0df13e8 --email demo@example.com --password Passw0rd ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 8c7e8affb74940b19ec2c7e76832797d |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + DEMO_USER=8c7e8affb74940b19ec2c7e76832797d + openstack role add --project 70fae7298c364df6bd617e37e0df13e8 --user 8c7e8affb74940b19ec2c7e76832797d 375c267cfa8740d6b3139b6d933274cb + openstack role add --project 70fae7298c364df6bd617e37e0df13e8 --user 868279de31974cb4b30d0e34baca515f 2595a17c0e024b2986cc7011b8113453 + openstack role add --project 70fae7298c364df6bd617e37e0df13e8 --user 8c7e8affb74940b19ec2c7e76832797d 519808ccaf8b4142ae31dbf216eb8dac + openstack role add --project 3f496858f9b9412f8f3d1fe52c4640a2 --user 8c7e8affb74940b19ec2c7e76832797d 
375c267cfa8740d6b3139b6d933274cb + [[ sql = \s\q\l ]] ++ openstack service create keystone --type identity --description 'Keystone Identity Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 17f9a82713c7498d83d53bb5566b4737 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + KEYSTONE_SERVICE=17f9a82713c7498d83d53bb5566b4737 + openstack endpoint create 17f9a82713c7498d83d53bb5566b4737 --region RegionOne --publicurl http://10.14.0.26:5000/v2.0 --adminurl http://10.14.0.26:35357/v2.0 --internalurl http://10.14.0.26:5000/v2.0 + create_nova_accounts ++ openstack project list ++ awk '/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ n-api ]] ++ openstack user create nova --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email nova@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 5a79bb7183fc44eba6cc4e9968abf72c |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NOVA_USER=5a79bb7183fc44eba6cc4e9968abf72c + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user 5a79bb7183fc44eba6cc4e9968abf72c + [[ sql = \s\q\l ]] ++ openstack service create nova --type=compute '--description=Nova Compute Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | e22f3cec48b34bd983aea6e70e6f929e |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NOVA_SERVICE=e22f3cec48b34bd983aea6e70e6f929e + openstack endpoint create e22f3cec48b34bd983aea6e70e6f929e --region RegionOne --publicurl 'http://10.14.0.26:8774/v2/$(tenant_id)s' --adminurl 'http://10.14.0.26:8774/v2/$(tenant_id)s' --internalurl 'http://10.14.0.26:8774/v2/$(tenant_id)s' ++ openstack service create novav3 --type=computev3 '--description=Nova Compute Service V3' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | fc905e97999746bdb82fb81af6cee8a6 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NOVA_V3_SERVICE=fc905e97999746bdb82fb81af6cee8a6 + openstack endpoint create fc905e97999746bdb82fb81af6cee8a6 --region RegionOne --publicurl http://10.14.0.26:8774/v3 --adminurl http://10.14.0.26:8774/v3 --internalurl http://10.14.0.26:8774/v3 + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + openstack role add --project service --user nova ResellerAdmin + [[ sql = \s\q\l ]] + openstack service create --type ec2 --description 'EC2 Compatibility Layer' ec2 + openstack endpoint create --region RegionOne --publicurl http://10.14.0.26:8773/services/Cloud --adminurl http://10.14.0.26:8773/services/Admin --internalurl http://10.14.0.26:8773/services/Cloud ec2 + is_service_enabled n-obj swift3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ sql = \s\q\l ]] + openstack service create --type s3 --description S3 s3 + openstack endpoint create --region RegionOne 
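
Everything in this stretch is catalog bootstrap over the admin API using the service token exported earlier (OS_TOKEN/OS_URL): projects, users and roles are created first, then each service gets a catalog record plus RegionOne endpoints, and the double-plus pipelines around every openstack call exist only to scrape the id column out of the table output (grep ' id ' fed through get_field 2). Condensed to a single service, the same flow looks like this, with the id extracted by a simpler awk than the one in the trace:

# token-flow bootstrap of one catalog entry, mirroring the keystone commands above
export OS_TOKEN=Passw0rd
export OS_URL=http://10.14.0.26:35357/v2.0
SVC_ID=$(openstack service create keystone --type identity \
           --description 'Keystone Identity Service' | awk '/ id / {print $4}')
openstack endpoint create "$SVC_ID" --region RegionOne \
  --publicurl   http://10.14.0.26:5000/v2.0 \
  --adminurl    http://10.14.0.26:35357/v2.0 \
  --internalurl http://10.14.0.26:5000/v2.0
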
--publicurl http://10.14.0.26:3333 --adminurl http://10.14.0.26:3333 --internalurl http://10.14.0.26:3333 s3 + create_glance_accounts + is_service_enabled g-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + openstack user create --password Passw0rd --project service glance + openstack role add --project service --user glance service + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + openstack user create --password Passw0rd --project service glance-swift + openstack role add --project service --user glance-swift ResellerAdmin + [[ sql = \s\q\l ]] + openstack service create --type image --description 'Glance Image Service' glance + openstack endpoint create --region RegionOne --publicurl http://10.14.0.26:9292 --adminurl http://10.14.0.26:9292 --internalurl http://10.14.0.26:9292 glance + create_cinder_accounts ++ openstack project list ++ awk '/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ c-api ]] ++ openstack user create cinder --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email cinder@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | b10c5453cd3f4f76b06125d607c02a16 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + CINDER_USER=b10c5453cd3f4f76b06125d607c02a16 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user b10c5453cd3f4f76b06125d607c02a16 + [[ sql = \s\q\l ]] ++ openstack service create cinder --type=volume '--description=Cinder Volume Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 302de5d9e5644fb69abdccba4659db41 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + CINDER_SERVICE=302de5d9e5644fb69abdccba4659db41 + openstack endpoint create 302de5d9e5644fb69abdccba4659db41 --region RegionOne --publicurl 'http://10.14.0.26:8776/v1/$(tenant_id)s' --adminurl 'http://10.14.0.26:8776/v1/$(tenant_id)s' --internalurl 'http://10.14.0.26:8776/v1/$(tenant_id)s' ++ openstack service create cinderv2 --type=volumev2 '--description=Cinder Volume Service V2' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 21ee5dfaf7e64012ae4605153219ea9a |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + CINDER_V2_SERVICE=21ee5dfaf7e64012ae4605153219ea9a + openstack endpoint create 21ee5dfaf7e64012ae4605153219ea9a --region RegionOne --publicurl 'http://10.14.0.26:8776/v2/$(tenant_id)s' --adminurl 'http://10.14.0.26:8776/v2/$(tenant_id)s' --internalurl 'http://10.14.0.26:8776/v2/$(tenant_id)s' + create_neutron_accounts ++ openstack project list ++ awk '/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 + [[ 
g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ q-svc ]] ++ openstack user create neutron --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email neutron@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | e9dc3d40a56041d2b54e8d43fc9453e8 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NEUTRON_USER=e9dc3d40a56041d2b54e8d43fc9453e8 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user e9dc3d40a56041d2b54e8d43fc9453e8 + [[ sql = \s\q\l ]] ++ openstack service create neutron --type=network '--description=Neutron Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 732f337fdeed4dcca2148be5384b0a15 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NEUTRON_SERVICE=732f337fdeed4dcca2148be5384b0a15 + openstack endpoint create 732f337fdeed4dcca2148be5384b0a15 --region RegionOne --publicurl http://10.14.0.26:9696/ --adminurl http://10.14.0.26:9696/ --internalurl http://10.14.0.26:9696/ + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + create_ceilometer_accounts ++ openstack project list ++ awk '/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ ceilometer-api ]] ++ openstack user create ceilometer --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email ceilometer@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | e3545f9fe24045e891c06369300a1160 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + CEILOMETER_USER=e3545f9fe24045e891c06369300a1160 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user e3545f9fe24045e891c06369300a1160 + [[ sql = \s\q\l ]] ++ openstack service create ceilometer --type=metering '--description=OpenStack Telemetry Service' ++ get_field 2 ++ grep ' id ' ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 00efcb4fc8344c488e3f3aa19ad25528 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + CEILOMETER_SERVICE=00efcb4fc8344c488e3f3aa19ad25528 + openstack endpoint create 00efcb4fc8344c488e3f3aa19ad25528 --region RegionOne --publicurl http://10.14.0.26:8777/ --adminurl http://10.14.0.26:8777/ --internalurl http://10.14.0.26:8777/ + is_service_enabled swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + openstack role add --project service --user ceilometer ResellerAdmin + is_service_enabled swift ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + create_swift_accounts + SWIFTUSERTEST1_PASSWORD=testing + SWIFTUSERTEST2_PASSWORD=testing2 + SWIFTUSERTEST3_PASSWORD=testing3 + KEYSTONE_CATALOG_BACKEND=sql ++ openstack project list ++ awk 
'/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 ++ openstack user create swift --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email=swift@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 13023b81c635418db75710c160175d15 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_USER=13023b81c635418db75710c160175d15 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user 13023b81c635418db75710c160175d15 + [[ sql = \s\q\l ]] ++ openstack service create swift --type=object-store '--description=Swift Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | b981a9f59e97438e949daf6c6169ddd4 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_SERVICE=b981a9f59e97438e949daf6c6169ddd4 + openstack endpoint create b981a9f59e97438e949daf6c6169ddd4 --region RegionOne --publicurl 'http://10.14.0.26:8080/v1/AUTH_$(tenant_id)s' --adminurl http://10.14.0.26:8080 --internalurl 'http://10.14.0.26:8080/v1/AUTH_$(tenant_id)s' ++ openstack project create swifttenanttest1 ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | c5afdf06f190483abdeced505563e007 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_TENANT_TEST1=c5afdf06f190483abdeced505563e007 + die_if_not_set 564 SWIFT_TENANT_TEST1 'Failure creating SWIFT_TENANT_TEST1' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace ++ openstack user create swiftusertest1 --password=testing --project c5afdf06f190483abdeced505563e007 --email=test@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 45f3baab2fea409abe817ee7d404ab64 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_USER_TEST1=45f3baab2fea409abe817ee7d404ab64 + die_if_not_set 567 SWIFT_USER_TEST1 'Failure creating SWIFT_USER_TEST1' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + openstack role add --user 45f3baab2fea409abe817ee7d404ab64 --project c5afdf06f190483abdeced505563e007 2595a17c0e024b2986cc7011b8113453 ++ openstack user create swiftusertest3 --password=testing3 --project c5afdf06f190483abdeced505563e007 --email=test3@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 75aa805b3ed549fb8411bc06f70f103f |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_USER_TEST3=75aa805b3ed549fb8411bc06f70f103f + die_if_not_set 572 SWIFT_USER_TEST3 'Failure creating SWIFT_USER_TEST3' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + openstack role add --user 75aa805b3ed549fb8411bc06f70f103f --project c5afdf06f190483abdeced505563e007 519808ccaf8b4142ae31dbf216eb8dac ++ openstack project create swifttenanttest2 ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 5d1a99fbf32049fe8009477341e9f85c |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_TENANT_TEST2=5d1a99fbf32049fe8009477341e9f85c + die_if_not_set 576 SWIFT_TENANT_TEST2 'Failure creating SWIFT_TENANT_TEST2' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace ++ openstack user create swiftusertest2 --password=testing2 --project 5d1a99fbf32049fe8009477341e9f85c 
--email=test2@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 2a82c4afaba74026a2aac56f46697206 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + SWIFT_USER_TEST2=2a82c4afaba74026a2aac56f46697206 + die_if_not_set 580 SWIFT_USER_TEST2 'Failure creating SWIFT_USER_TEST2' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + openstack role add --user 2a82c4afaba74026a2aac56f46697206 --project 5d1a99fbf32049fe8009477341e9f85c 2595a17c0e024b2986cc7011b8113453 + is_service_enabled heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + create_heat_accounts ++ openstack project list ++ awk '/ service / { print $2 }' + SERVICE_TENANT=a0ab893ec5894bde968f3d6d06537d17 ++ openstack role list ++ awk '/ admin / { print $2 }' + ADMIN_ROLE=2595a17c0e024b2986cc7011b8113453 ++ openstack user create heat --password Passw0rd --project a0ab893ec5894bde968f3d6d06537d17 --email heat@example.com ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 19b15685eb744c3c98470e48829876f1 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + HEAT_USER=19b15685eb744c3c98470e48829876f1 + openstack role add 2595a17c0e024b2986cc7011b8113453 --project a0ab893ec5894bde968f3d6d06537d17 --user 19b15685eb744c3c98470e48829876f1 + [[ sql = \s\q\l ]] ++ openstack service create heat --type=orchestration '--description=Heat Orchestration Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 5f7e3322faa345458605f4319e4538ff |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + HEAT_SERVICE=5f7e3322faa345458605f4319e4538ff + openstack endpoint create 5f7e3322faa345458605f4319e4538ff --region RegionOne --publicurl 'http://10.14.0.26:8004/v1/$(tenant_id)s' --adminurl 'http://10.14.0.26:8004/v1/$(tenant_id)s' --internalurl 'http://10.14.0.26:8004/v1/$(tenant_id)s' ++ openstack service create heat --type=cloudformation '--description=Heat CloudFormation Service' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | dacf21cbac564339a1af6050333b64b0 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + HEAT_CFN_SERVICE=dacf21cbac564339a1af6050333b64b0 + openstack endpoint create dacf21cbac564339a1af6050333b64b0 --region RegionOne --publicurl http://10.14.0.26:8000/v1 --adminurl http://10.14.0.26:8000/v1 --internalurl http://10.14.0.26:8000/v1 + openstack role create heat_stack_user + [[ trusts == trusts ]] ++ openstack role create heat_stack_owner ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | e9e388657211491dafd580942de7ee76 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + HEAT_OWNER_ROLE=e9e388657211491dafd580942de7ee76 + openstack role add e9e388657211491dafd580942de7ee76 --project demo --user demo + openstack role add e9e388657211491dafd580942de7ee76 --project demo --user admin + openstack role add e9e388657211491dafd580942de7ee76 --project admin --user admin + iniset /etc/heat/heat.conf DEFAULT deferred_auth_method trusts ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ True == \T\r\u\e ]] + KS_ENDPOINT_V3=http://10.14.0.26:5000/v3 ++ openstack --os-token Passw0rd --os-url=http://10.14.0.26:5000/v3 --os-identity-api-version=3 domain create heat --description 'Owns users and projects created by heat' ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 
07bc036e8a1e4058b8f1f71816002ab2 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + D_ID=07bc036e8a1e4058b8f1f71816002ab2 + iniset /etc/heat/heat.conf DEFAULT stack_user_domain 07bc036e8a1e4058b8f1f71816002ab2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + openstack --os-token Passw0rd --os-url=http://10.14.0.26:5000/v3 --os-identity-api-version=3 user create --password Passw0rd --domain 07bc036e8a1e4058b8f1f71816002ab2 heat_domain_admin --description 'Manages users and projects created by heat' + openstack --os-token Passw0rd --os-url=http://10.14.0.26:5000/v3 --os-identity-api-version=3 role add --user heat_domain_admin --domain 07bc036e8a1e4058b8f1f71816002ab2 admin + iniset /etc/heat/heat.conf DEFAULT stack_domain_admin heat_domain_admin ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/heat/heat.conf DEFAULT stack_domain_admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + unset OS_TOKEN OS_URL + export OS_AUTH_URL=http://10.14.0.26:35357/v2.0 + OS_AUTH_URL=http://10.14.0.26:35357/v2.0 + export OS_TENANT_NAME=admin + OS_TENANT_NAME=admin + export OS_USERNAME=admin + OS_USERNAME=admin + export OS_PASSWORD=Passw0rd + OS_PASSWORD=Passw0rd + is_service_enabled horizon ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled g-reg ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Glance' + [[ -t 3 ]] + echo -e Configuring Glance + init_glance + rm -rf /opt/stack/data/glance/images + mkdir -p /opt/stack/data/glance/images + rm -rf /opt/stack/data/glance/cache + mkdir -p /opt/stack/data/glance/cache + recreate_database glance utf8 + local db=glance + local charset=utf8 + recreate_database_mysql glance utf8 + local db=glance + local charset=utf8 + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'DROP DATABASE IF EXISTS glance;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE glance CHARACTER SET utf8;' + /usr/local/bin/glance-manage db_sync 2014-10-12 02:36:25.505 16010 DEBUG glance.openstack.common.db.sqlalchemy.session [-] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION _mysql_check_effective_sql_mode /opt/stack/glance/glance/openstack/common/db/sqlalchemy/session.py:562 2014-10-12 02:36:25.506 16010 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.507 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.508 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.509 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.510 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.511 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.512 16010 DEBUG migrate.versioning.repository [-] Repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:36:25.513 16010 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'Glance Migrations'), ('version_table', 'migrate_version'), ('required_dbs', '[]')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:36:25.525 16010 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:36:25.526 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.526 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.526 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.526 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.526 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.527 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.528 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.529 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.530 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.531 16010 DEBUG migrate.versioning.repository [-] Repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:36:25.532 16010 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'Glance Migrations'), ('version_table', 'migrate_version'), ('required_dbs', '[]')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:36:25.577 16010 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.578 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.579 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/007_add_owner.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.580 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/013_add_protected.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.581 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/023_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/024_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/025_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/028_owner_index.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.582 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.script.base [-] Script /opt/stack/glance/glance/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.repository [-] Repository /opt/stack/glance/glance/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:36:25.583 16010 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'Glance Migrations'), ('version_table', 'migrate_version'), ('required_dbs', '[]')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:36:25.588 16010 INFO migrate.versioning.api [-] 0 -> 1... 2014-10-12 02:36:25.610 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table images 2014-10-12 02:36:25.716 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:25.716 16010 INFO migrate.versioning.api [-] 1 -> 2... 2014-10-12 02:36:25.723 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table image_properties 2014-10-12 02:36:25.869 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:25.869 16010 INFO migrate.versioning.api [-] 2 -> 3... 2014-10-12 02:36:26.036 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.037 16010 INFO migrate.versioning.api [-] 3 -> 4... 2014-10-12 02:36:26.097 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.097 16010 INFO migrate.versioning.api [-] 4 -> 5... 2014-10-12 02:36:26.165 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.165 16010 INFO migrate.versioning.api [-] 5 -> 6... 2014-10-12 02:36:26.294 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.295 16010 INFO migrate.versioning.api [-] 6 -> 7... 2014-10-12 02:36:26.367 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.367 16010 INFO migrate.versioning.api [-] 7 -> 8... 2014-10-12 02:36:26.374 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table image_members 2014-10-12 02:36:26.536 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.536 16010 INFO migrate.versioning.api [-] 8 -> 9... 2014-10-12 02:36:26.652 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.652 16010 INFO migrate.versioning.api [-] 9 -> 10... 2014-10-12 02:36:26.668 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.668 16010 INFO migrate.versioning.api [-] 10 -> 11... 2014-10-12 02:36:26.782 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:26.782 16010 INFO migrate.versioning.api [-] 11 -> 12... 2014-10-12 02:36:27.155 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.155 16010 INFO migrate.versioning.api [-] 12 -> 13... 2014-10-12 02:36:27.219 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.219 16010 INFO migrate.versioning.api [-] 13 -> 14... 2014-10-12 02:36:27.229 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table image_tags 2014-10-12 02:36:27.355 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.355 16010 INFO migrate.versioning.api [-] 14 -> 15... 2014-10-12 02:36:27.388 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.389 16010 INFO migrate.versioning.api [-] 15 -> 16... 
2014-10-12 02:36:27.471 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.471 16010 INFO migrate.versioning.api [-] 16 -> 17... 2014-10-12 02:36:27.519 16010 INFO 017_quote_encrypted_swift_credentials [-] 'metadata_encryption_key' was not specified in the config file or a config file was not specified. This means that this migration is a NOOP. 2014-10-12 02:36:27.532 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.532 16010 INFO migrate.versioning.api [-] 17 -> 18... 2014-10-12 02:36:27.542 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table image_locations 2014-10-12 02:36:27.662 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.662 16010 INFO migrate.versioning.api [-] 18 -> 19... 2014-10-12 02:36:27.680 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.680 16010 INFO migrate.versioning.api [-] 19 -> 20... 2014-10-12 02:36:27.747 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.748 16010 INFO migrate.versioning.api [-] 20 -> 21... 2014-10-12 02:36:27.759 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.759 16010 INFO migrate.versioning.api [-] 21 -> 22... 2014-10-12 02:36:27.840 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.840 16010 INFO migrate.versioning.api [-] 22 -> 23... 2014-10-12 02:36:27.852 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.852 16010 INFO migrate.versioning.api [-] 23 -> 24... 2014-10-12 02:36:27.867 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.867 16010 INFO migrate.versioning.api [-] 24 -> 25... 2014-10-12 02:36:27.881 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.881 16010 INFO migrate.versioning.api [-] 25 -> 26... 2014-10-12 02:36:27.945 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:27.945 16010 INFO migrate.versioning.api [-] 26 -> 27... 2014-10-12 02:36:28.004 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.004 16010 INFO migrate.versioning.api [-] 27 -> 28... 2014-10-12 02:36:28.062 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.062 16010 INFO migrate.versioning.api [-] 28 -> 29... 2014-10-12 02:36:28.229 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.229 16010 INFO migrate.versioning.api [-] 29 -> 30... 2014-10-12 02:36:28.234 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table tasks 2014-10-12 02:36:28.480 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.480 16010 INFO migrate.versioning.api [-] 30 -> 31... 2014-10-12 02:36:28.499 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.499 16010 INFO migrate.versioning.api [-] 31 -> 32... 2014-10-12 02:36:28.507 16010 INFO glance.db.sqlalchemy.migrate_repo.schema [-] creating table task_info 2014-10-12 02:36:28.697 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.698 16010 INFO migrate.versioning.api [-] 32 -> 33... 2014-10-12 02:36:28.778 16010 INFO migrate.versioning.api [-] done 2014-10-12 02:36:28.778 16010 INFO migrate.versioning.api [-] 33 -> 34... 
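The run above is sqlalchemy-migrate walking the Glance repository from version 0 to its head; the final 33 -> 34 step completes just below. A minimal sketch for re-checking the result by hand, assuming the usual DevStack database name "glance" and registry config path (neither is shown in this trace), with the credentials and the migrate_version table name taken from the surrounding output:

    # Re-run the schema sync (idempotent) and read back the recorded version.
    glance-manage --config-file /etc/glance/glance-registry.conf db_sync
    mysql -uroot -pPassw0rd -h127.0.0.1 glance \
        -e 'SELECT repository_id, version FROM migrate_version;'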
2014-10-12 02:36:28.843 16010 INFO migrate.versioning.api [-] done + create_glance_cache_dir + sudo mkdir -p /var/cache/glance/api + sudo chown cloudbase /var/cache/glance/api + rm -f /var/cache/glance/api/cacert.pem /var/cache/glance/api/revoked.pem /var/cache/glance/api/signing_cert.pem + sudo mkdir -p /var/cache/glance/registry + sudo chown cloudbase /var/cache/glance/registry + rm -f /var/cache/glance/registry/cacert.pem /var/cache/glance/registry/revoked.pem /var/cache/glance/registry/signing_cert.pem + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Neutron' + [[ -t 3 ]] + echo -e Configuring Neutron + configure_neutron + _configure_neutron_common + [[ ! -d /etc/neutron ]] + sudo chown cloudbase /etc/neutron + cp /opt/stack/neutron/etc/neutron.conf /etc/neutron/neutron.conf + neutron_plugin_configure_common + Q_PLUGIN_CONF_PATH=etc/neutron/plugins/ml2 + Q_PLUGIN_CONF_FILENAME=ml2_conf.ini + Q_DB_NAME=neutron_ml2 + Q_PLUGIN_CLASS=neutron.plugins.ml2.plugin.Ml2Plugin + _neutron_service_plugin_class_add neutron.services.l3_router.l3_router_plugin.L3RouterPlugin + local service_plugin_class=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin + [[ '' == '' ]] + Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin + [[ etc/neutron/plugins/ml2 == '' ]] + [[ ml2_conf.ini == '' ]] + [[ neutron.plugins.ml2.plugin.Ml2Plugin == '' ]] + mkdir -p /etc/neutron/plugins/ml2 + Q_PLUGIN_CONF_FILE=etc/neutron/plugins/ml2/ml2_conf.ini + cp /opt/stack/neutron/etc/neutron/plugins/ml2/ml2_conf.ini /etc/neutron/plugins/ml2/ml2_conf.ini ++ database_connection_url neutron_ml2 ++ local db=neutron_ml2 ++ database_connection_url_mysql neutron_ml2 ++ local db=neutron_ml2 ++ echo 'mysql://root:Passw0rd@127.0.0.1/neutron_ml2?charset=utf8' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini database connection 'mysql://root:Passw0rd@127.0.0.1/neutron_ml2?charset=utf8' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT state_path /opt/stack/data/neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ 0 > 0 ]] + [[ '' != '' ]] + '[' libvirt = fake ']' + '[' False == True ']' + _neutron_setup_rootwrap + [[ True == \F\a\l\s\e ]] + Q_CONF_ROOTWRAP_D=/etc/neutron/rootwrap.d + [[ -d /etc/neutron/rootwrap.d ]] + sudo rm -rf /etc/neutron/rootwrap.d + mkdir -p -m 755 /etc/neutron/rootwrap.d + cp -pr /opt/stack/neutron/etc/neutron/rootwrap.d/debug.filters /opt/stack/neutron/etc/neutron/rootwrap.d/dhcp.filters /opt/stack/neutron/etc/neutron/rootwrap.d/iptables-firewall.filters /opt/stack/neutron/etc/neutron/rootwrap.d/l3.filters /opt/stack/neutron/etc/neutron/rootwrap.d/lbaas-haproxy.filters /opt/stack/neutron/etc/neutron/rootwrap.d/linuxbridge-plugin.filters /opt/stack/neutron/etc/neutron/rootwrap.d/nec-plugin.filters /opt/stack/neutron/etc/neutron/rootwrap.d/openvswitch-plugin.filters /opt/stack/neutron/etc/neutron/rootwrap.d/ryu-plugin.filters /opt/stack/neutron/etc/neutron/rootwrap.d/vpnaas.filters /etc/neutron/rootwrap.d/ + sudo chown -R root:root /etc/neutron/rootwrap.d + sudo chmod 644 /etc/neutron/rootwrap.d/debug.filters /etc/neutron/rootwrap.d/dhcp.filters /etc/neutron/rootwrap.d/iptables-firewall.filters /etc/neutron/rootwrap.d/l3.filters /etc/neutron/rootwrap.d/lbaas-haproxy.filters /etc/neutron/rootwrap.d/linuxbridge-plugin.filters /etc/neutron/rootwrap.d/nec-plugin.filters 
/etc/neutron/rootwrap.d/openvswitch-plugin.filters /etc/neutron/rootwrap.d/ryu-plugin.filters /etc/neutron/rootwrap.d/vpnaas.filters + test -r /opt/stack/neutron/etc/neutron/rootwrap.conf + sudo cp -p /opt/stack/neutron/etc/rootwrap.conf /etc/neutron/rootwrap.conf + sudo sed -e 's:^filters_path=.*$:filters_path=/etc/neutron/rootwrap.d:' -i /etc/neutron/rootwrap.conf + sudo chown root:root /etc/neutron/rootwrap.conf + sudo chmod 0644 /etc/neutron/rootwrap.conf + ROOTWRAP_SUDOER_CMD='/usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf *' ++ mktemp + TEMPFILE=/tmp/tmp.jcaWcVDrRF + echo 'cloudbase ALL=(root) NOPASSWD: /usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf *' + chmod 0440 /tmp/tmp.jcaWcVDrRF + sudo chown root:root /tmp/tmp.jcaWcVDrRF + sudo mv /tmp/tmp.jcaWcVDrRF /etc/sudoers.d/neutron-rootwrap + iniset /etc/neutron/neutron.conf agent root_helper 'sudo /usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset_rpc_backend neutron /etc/neutron/neutron.conf DEFAULT + local package=neutron + local file=/etc/neutron/neutron.conf + local section=DEFAULT + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled qpid ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + '[' -n '' ']' + is_service_enabled rabbit ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/neutron/neutron.conf DEFAULT rpc_backend neutron.openstack.common.rpc.impl_kombu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT rabbit_hosts 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT rabbit_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-lbaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_lbaas + neutron_agent_lbaas_configure_common + _neutron_service_plugin_class_add neutron.services.loadbalancer.plugin.LoadBalancerPlugin + local service_plugin_class=neutron.services.loadbalancer.plugin.LoadBalancerPlugin + [[ neutron.services.l3_router.l3_router_plugin.L3RouterPlugin == '' ]] + [[ ! 
,neutron.services.l3_router.l3_router_plugin.L3RouterPlugin, =~ ,neutron.services.loadbalancer.plugin.LoadBalancerPlugin, ]] + Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin + neutron_agent_lbaas_configure_agent + LBAAS_AGENT_CONF_PATH=/etc/neutron/services/loadbalancer/haproxy + mkdir -p /etc/neutron/services/loadbalancer/haproxy + LBAAS_AGENT_CONF_FILENAME=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini + cp /opt/stack/neutron/etc/lbaas_agent.ini /etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini + iniset /etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini DEFAULT ovs_use_veth False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_setup_interface_driver /etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini + local conf_file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini + iniset /etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini DEFAULT interface_driver neutron.agent.linux.interface.OVSInterfaceDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + is_service_enabled q-metering ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_metering + neutron_agent_metering_configure_common + _neutron_service_plugin_class_add neutron.services.metering.metering_plugin.MeteringPlugin + local service_plugin_class=neutron.services.metering.metering_plugin.MeteringPlugin + [[ neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin == '' ]] + [[ ! ,neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin, =~ ,neutron.services.metering.metering_plugin.MeteringPlugin, ]] + Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin + neutron_agent_metering_configure_agent + METERING_AGENT_CONF_PATH=/etc/neutron/services/metering + mkdir -p /etc/neutron/services/metering + METERING_AGENT_CONF_FILENAME=/etc/neutron/services/metering/metering_agent.ini + cp /opt/stack/neutron/etc/metering_agent.ini /etc/neutron/services/metering/metering_agent.ini + is_service_enabled q-vpn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_vpn + neutron_vpn_install_agent_packages + install_package openswan ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + apt_get install openswan ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + sudo DEBIAN_FRONTEND=noninteractive http_proxy= https_proxy= no_proxy= apt-get --option Dpkg::Options::=--force-confold --assume-yes install openswan + neutron_vpn_configure_common + _neutron_service_plugin_class_add neutron.services.vpn.plugin.VPNDriverPlugin + local service_plugin_class=neutron.services.vpn.plugin.VPNDriverPlugin + [[ neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin == '' ]] + [[ ! 
,neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin, =~ ,neutron.services.vpn.plugin.VPNDriverPlugin, ]] + Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin + is_service_enabled q-fwaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_fwaas + neutron_fwaas_configure_common + _neutron_service_plugin_class_add neutron.services.firewall.fwaas_plugin.FirewallPlugin + local service_plugin_class=neutron.services.firewall.fwaas_plugin.FirewallPlugin + [[ neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin == '' ]] + [[ ! ,neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin, =~ ,neutron.services.firewall.fwaas_plugin.FirewallPlugin, ]] + Q_SERVICE_PLUGIN_CLASSES=neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin,neutron.services.firewall.fwaas_plugin.FirewallPlugin + neutron_fwaas_configure_driver + FWAAS_DRIVER_CONF_FILENAME=/etc/neutron/fwaas_driver.ini + cp /opt/stack/neutron/etc/fwaas_driver.ini /etc/neutron/fwaas_driver.ini + iniset_multiline /etc/neutron/fwaas_driver.ini fwaas enabled True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset_multiline /etc/neutron/fwaas_driver.ini fwaas driver neutron.services.firewall.drivers.linux.iptables_fwaas.IptablesFwaasDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-agt q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_service + Q_API_PASTE_FILE=/etc/neutron/api-paste.ini + Q_POLICY_FILE=/etc/neutron/policy.json + cp /opt/stack/neutron/etc/api-paste.ini /etc/neutron/api-paste.ini + cp /opt/stack/neutron/etc/policy.json /etc/neutron/policy.json + iniset /etc/neutron/neutron.conf DEFAULT core_plugin neutron.plugins.ml2.plugin.Ml2Plugin ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin,neutron.services.firewall.fwaas_plugin.FirewallPlugin != '' ]] + iniset /etc/neutron/neutron.conf DEFAULT service_plugins neutron.services.l3_router.l3_router_plugin.L3RouterPlugin,neutron.services.loadbalancer.plugin.LoadBalancerPlugin,neutron.services.metering.metering_plugin.MeteringPlugin,neutron.services.vpn.plugin.VPNDriverPlugin,neutron.services.firewall.fwaas_plugin.FirewallPlugin ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT debug True ++ set +o ++ 
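The repeated _neutron_service_plugin_class_add calls above accumulate Q_SERVICE_PLUGIN_CLASSES as a comma-separated list, appending each plugin class only if it is not already present, and the result is then written to neutron.conf as service_plugins. A minimal stand-alone sketch of that dedupe pattern, using an illustrative function and variable name rather than the DevStack helpers themselves:

    PLUGINS=""
    plugin_list_add() {
        local new=$1
        if [[ -z "$PLUGINS" ]]; then
            PLUGINS=$new
        elif [[ ! ",$PLUGINS," =~ ,$new, ]]; then   # same membership test as in the trace
            PLUGINS+=",$new"
        fi
    }
    plugin_list_add neutron.services.l3_router.l3_router_plugin.L3RouterPlugin
    plugin_list_add neutron.services.l3_router.l3_router_plugin.L3RouterPlugin   # duplicate, skipped
    echo "$PLUGINS"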
grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT policy_file /etc/neutron/policy.json ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT allow_overlapping_ips True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT auth_strategy keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + _neutron_setup_keystone /etc/neutron/neutron.conf keystone_authtoken + local conf_file=/etc/neutron/neutron.conf + local section=keystone_authtoken + local use_auth_url= + local skip_auth_cache= + local use_service_port= + local keystone_port=35357 + [[ -n '' ]] + [[ -n '' ]] + iniset /etc/neutron/neutron.conf keystone_authtoken auth_host 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf keystone_authtoken auth_port 35357 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf keystone_authtoken auth_protocol http ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf keystone_authtoken admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf keystone_authtoken admin_user neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf keystone_authtoken admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ -z '' ]] + iniset /etc/neutron/neutron.conf keystone_authtoken signing_dir /var/cache/neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + create_neutron_cache_dir + sudo mkdir -p /var/cache/neutron + sudo chown cloudbase /var/cache/neutron + rm -f /var/cache/neutron/cacert.pem /var/cache/neutron/revoked.pem /var/cache/neutron/signing_cert.pem + iniset /etc/neutron/neutron.conf DEFAULT notify_nova_on_port_status_change True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT notify_nova_on_port_data_changes True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT nova_url http://10.14.0.26:8774/v2 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT nova_admin_username nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT nova_admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace ++ openstack project list ++ awk '/ service / { print $2 }' + ADMIN_TENANT_ID=a0ab893ec5894bde968f3d6d06537d17 + iniset /etc/neutron/neutron.conf DEFAULT nova_admin_tenant_id a0ab893ec5894bde968f3d6d06537d17 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT nova_admin_auth_url http://10.14.0.26:35357/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_configure_service + [[ vlan != '' ]] + Q_SRV_EXTRA_OPTS+=(tenant_network_types=$Q_ML2_TENANT_NETWORK_TYPE) + '[' '' == '' ']' + [[ '' = '' ]] + [[ physnet1 != '' ]] + ML2_VLAN_RANGES=physnet1 + [[ 500:2000 != '' ]] + ML2_VLAN_RANGES=physnet1:500:2000 + [[ physnet1:500:2000 != '' ]] + 
Q_ML2_PLUGIN_VLAN_TYPE_OPTIONS=(network_vlan_ranges=$ML2_VLAN_RANGES) + [[ True == \T\r\u\e ]] + iniset /etc/neutron/plugins/ml2/ml2_conf.ini securitygroup firewall_driver neutron.agent.not.a.real.FirewallDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ovs local_ip 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2 mechanism_drivers=openvswitch,hyperv + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2 + OPTS=mechanism_drivers=openvswitch,hyperv + '[' -z mechanism_drivers=openvswitch,hyperv ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2 mechanism_drivers openvswitch,hyperv ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2 type_drivers=local,flat,vlan,gre,vxlan + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2 + OPTS=type_drivers=local,flat,vlan,gre,vxlan + '[' -z type_drivers=local,flat,vlan,gre,vxlan ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2 type_drivers local,flat,vlan,gre,vxlan ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2 tenant_network_types=vlan + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2 + OPTS=tenant_network_types=vlan + '[' -z tenant_network_types=vlan ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2 tenant_network_types vlan ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_gre tunnel_id_ranges=1:1000 + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2_type_gre + OPTS=tunnel_id_ranges=1:1000 + '[' -z tunnel_id_ranges=1:1000 ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_gre tunnel_id_ranges 1:1000 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_vxlan vni_ranges=1001:2000 + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2_type_vxlan + OPTS=vni_ranges=1001:2000 + '[' -z vni_ranges=1001:2000 ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_vxlan vni_ranges 1001:2000 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + populate_ml2_config /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_vlan network_vlan_ranges=physnet1:500:2000 + CONF=/etc/neutron/plugins/ml2/ml2_conf.ini + SECTION=ml2_type_vlan + OPTS=network_vlan_ranges=physnet1:500:2000 + '[' -z network_vlan_ranges=physnet1:500:2000 ']' + for I in '"${OPTS[@]}"' + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ml2_type_vlan network_vlan_ranges physnet1:500:2000 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-agt ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_plugin_agent + iniset /etc/neutron/plugins/ml2/ml2_conf.ini agent root_helper 'sudo /usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/neutron.conf DEFAULT debug True ++ set +o ++ 
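Each iniset FILE SECTION OPTION VALUE call above writes one option into the named section of the target file, so after this block the ML2 settings from the trace should be readable straight out of ml2_conf.ini. A hedged readback; the exact "option = value" spacing depends on the iniset helper:

    grep -E '^(mechanism_drivers|type_drivers|tenant_network_types|network_vlan_ranges|local_ip)' \
        /etc/neutron/plugins/ml2/ml2_conf.ini
    # expected, per the trace above:
    #   mechanism_drivers = openvswitch,hyperv
    #   type_drivers = local,flat,vlan,gre,vxlan
    #   tenant_network_types = vlan
    #   network_vlan_ranges = physnet1:500:2000
    #   local_ip = 10.14.0.26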
grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_configure_plugin_agent + _neutron_ovs_base_setup_bridge br-int + local bridge=br-int + neutron-ovs-cleanup + sudo ovs-vsctl --no-wait -- --may-exist add-br br-int + sudo ovs-vsctl --no-wait br-set-external-id br-int bridge-id br-int + _neutron_ovs_base_configure_firewall_driver + [[ True == \T\r\u\e ]] + iniset /etc/neutron/plugins/ml2/ml2_conf.ini securitygroup firewall_driver neutron.agent.linux.iptables_firewall.OVSHybridIptablesFirewallDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ False = \T\r\u\e ]] + [[ physnet1:br-eth1 = '' ]] + [[ physnet1:br-eth1 != '' ]] + iniset /etc/neutron/plugins/ml2/ml2_conf.ini ovs bridge_mappings physnet1:br-eth1 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + AGENT_BINARY=/usr/local/bin/neutron-openvswitch-agent + '[' libvirt = xenserver ']' + is_service_enabled q-dhcp ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_dhcp_agent + AGENT_DHCP_BINARY=/usr/local/bin/neutron-dhcp-agent + Q_DHCP_CONF_FILE=/etc/neutron/dhcp_agent.ini + cp /opt/stack/neutron/etc/dhcp_agent.ini /etc/neutron/dhcp_agent.ini + iniset /etc/neutron/dhcp_agent.ini DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/dhcp_agent.ini DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/dhcp_agent.ini DEFAULT use_namespaces True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/dhcp_agent.ini DEFAULT root_helper 'sudo /usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + _neutron_setup_interface_driver /etc/neutron/dhcp_agent.ini + iniset /etc/neutron/dhcp_agent.ini DEFAULT ovs_use_veth False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_setup_interface_driver /etc/neutron/dhcp_agent.ini + local conf_file=/etc/neutron/dhcp_agent.ini + iniset /etc/neutron/dhcp_agent.ini DEFAULT interface_driver neutron.agent.linux.interface.OVSInterfaceDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_configure_dhcp_agent + iniset /etc/neutron/dhcp_agent.ini DEFAULT dhcp_agent_manager neutron.agent.dhcp_agent.DhcpAgentWithStateReport ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-l3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_l3_agent + Q_L3_ENABLED=True + Q_L3_ROUTER_PER_TENANT=True + AGENT_L3_BINARY=/usr/local/bin/neutron-l3-agent + Q_L3_CONF_FILE=/etc/neutron/l3_agent.ini + is_service_enabled q-fwaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + Q_FWAAS_CONF_FILE=/etc/neutron/fwaas_driver.ini + cp /opt/stack/neutron/etc/l3_agent.ini /etc/neutron/l3_agent.ini + iniset /etc/neutron/l3_agent.ini DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/l3_agent.ini DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/l3_agent.ini DEFAULT use_namespaces True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/l3_agent.ini DEFAULT root_helper 'sudo /usr/local/bin/neutron-rootwrap 
/etc/neutron/rootwrap.conf' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + _neutron_setup_interface_driver /etc/neutron/l3_agent.ini + iniset /etc/neutron/l3_agent.ini DEFAULT ovs_use_veth False ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_setup_interface_driver /etc/neutron/l3_agent.ini + local conf_file=/etc/neutron/l3_agent.ini + iniset /etc/neutron/l3_agent.ini DEFAULT interface_driver neutron.agent.linux.interface.OVSInterfaceDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_configure_l3_agent + _neutron_ovs_base_configure_l3_agent + iniset /etc/neutron/l3_agent.ini DEFAULT external_network_bridge br-ex ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron-ovs-cleanup + sudo ovs-vsctl --no-wait -- --may-exist add-br br-ex + sudo ovs-vsctl --no-wait br-set-external-id br-ex bridge-id br-ex + sudo ip addr flush dev br-ex + iniset /etc/neutron/l3_agent.ini DEFAULT l3_agent_manager neutron.agent.l3_agent.L3NATAgentWithStateReport ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + _configure_neutron_metadata_agent + AGENT_META_BINARY=/usr/local/bin/neutron-metadata-agent + Q_META_CONF_FILE=/etc/neutron/metadata_agent.ini + cp /opt/stack/neutron/etc/metadata_agent.ini /etc/neutron/metadata_agent.ini + iniset /etc/neutron/metadata_agent.ini DEFAULT verbose True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT debug True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT nova_metadata_ip 10.14.0.26 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT root_helper 'sudo /usr/local/bin/neutron-rootwrap /etc/neutron/rootwrap.conf' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + _neutron_setup_keystone /etc/neutron/metadata_agent.ini DEFAULT True True True + local conf_file=/etc/neutron/metadata_agent.ini + local section=DEFAULT + local use_auth_url=True + local skip_auth_cache=True + local use_service_port=True + local keystone_port=35357 + [[ -n True ]] + keystone_port=5000 + [[ -n True ]] + iniset /etc/neutron/metadata_agent.ini DEFAULT auth_url http://10.14.0.26:5000/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT admin_user neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/neutron/metadata_agent.ini DEFAULT admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ -z True ]] + _configure_neutron_debug_command + [[ False != \T\r\u\e ]] + return + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + init_neutron + recreate_database neutron_ml2 utf8 + local db=neutron_ml2 + local charset=utf8 + recreate_database_mysql neutron_ml2 utf8 + local db=neutron_ml2 + local charset=utf8 + mysql -uroot -pPassw0rd 
-h127.0.0.1 -e 'DROP DATABASE IF EXISTS neutron_ml2;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE neutron_ml2 CHARACTER SET utf8;' + /usr/local/bin/neutron-db-manage --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini upgrade head No handlers could be found for logger "neutron.common.legacy" INFO [alembic.migration] Context impl MySQLImpl. INFO [alembic.migration] Will assume non-transactional DDL. INFO [alembic.migration] Running upgrade None -> folsom, folsom initial database INFO [alembic.migration] Running upgrade folsom -> 2c4af419145b, l3_support INFO [alembic.migration] Running upgrade 2c4af419145b -> 5a875d0e5c, ryu INFO [alembic.migration] Running upgrade 5a875d0e5c -> 48b6f43f7471, DB support for service types INFO [alembic.migration] Running upgrade 48b6f43f7471 -> 3cb5d900c5de, security_groups INFO [alembic.migration] Running upgrade 3cb5d900c5de -> 1d76643bcec4, nvp_netbinding INFO [alembic.migration] Running upgrade 1d76643bcec4 -> 2a6d0b51f4bb, cisco plugin cleanup INFO [alembic.migration] Running upgrade 2a6d0b51f4bb -> 1b693c095aa3, Quota ext support added in Grizzly INFO [alembic.migration] Running upgrade 1b693c095aa3 -> 1149d7de0cfa, initial port security INFO [alembic.migration] Running upgrade 1149d7de0cfa -> 49332180ca96, ryu plugin update INFO [alembic.migration] Running upgrade 49332180ca96 -> 38335592a0dc, nvp_portmap INFO [alembic.migration] Running upgrade 38335592a0dc -> 54c2c487e913, 'DB support for load balancing service INFO [alembic.migration] Running upgrade 54c2c487e913 -> 45680af419f9, nvp_qos INFO [alembic.migration] Running upgrade 45680af419f9 -> 1c33fa3cd1a1, Support routing table configuration on Router INFO [alembic.migration] Running upgrade 1c33fa3cd1a1 -> 363468ac592c, nvp_network_gw INFO [alembic.migration] Running upgrade 363468ac592c -> 511471cc46b, Add agent management extension model support INFO [alembic.migration] Running upgrade 511471cc46b -> 3b54bf9e29f7, NEC plugin sharednet INFO [alembic.migration] Running upgrade 3b54bf9e29f7 -> 4692d074d587, agent scheduler INFO [alembic.migration] Running upgrade 4692d074d587 -> 1341ed32cc1e, nvp_net_binding INFO [alembic.migration] Running upgrade 1341ed32cc1e -> grizzly, grizzly INFO [alembic.migration] Running upgrade grizzly -> f489cf14a79c, DB support for load balancing service (havana) INFO [alembic.migration] Running upgrade f489cf14a79c -> 176a85fc7d79, Add portbindings db INFO [alembic.migration] Running upgrade 176a85fc7d79 -> 32b517556ec9, remove TunnelIP model INFO [alembic.migration] Running upgrade 32b517556ec9 -> 128e042a2b68, ext_gw_mode INFO [alembic.migration] Running upgrade 128e042a2b68 -> 5ac71e65402c, ml2_initial INFO [alembic.migration] Running upgrade 5ac71e65402c -> 3cbf70257c28, nvp_mac_learning INFO [alembic.migration] Running upgrade 3cbf70257c28 -> 5918cbddab04, add tables for router rules support INFO [alembic.migration] Running upgrade 5918cbddab04 -> 3cabb850f4a5, Table to track port to host associations INFO [alembic.migration] Running upgrade 3cabb850f4a5 -> b7a8863760e, Remove cisco_vlan_bindings table INFO [alembic.migration] Running upgrade b7a8863760e -> 13de305df56e, nec_add_pf_name INFO [alembic.migration] Running upgrade 13de305df56e -> 20ae61555e95, DB Migration for ML2 GRE Type Driver INFO [alembic.migration] Running upgrade 20ae61555e95 -> 477a4488d3f4, DB Migration for ML2 VXLAN Type Driver INFO [alembic.migration] Running upgrade 477a4488d3f4 -> 2032abe8edac, LBaaS add status description INFO 
[alembic.migration] Running upgrade 2032abe8edac -> 52c5e4a18807, LBaaS Pool scheduler INFO [alembic.migration] Running upgrade 52c5e4a18807 -> 557edfc53098, New service types framework (service providers) INFO [alembic.migration] Running upgrade 557edfc53098 -> e6b16a30d97, Add cisco_provider_networks table INFO [alembic.migration] Running upgrade e6b16a30d97 -> 39cf3f799352, FWaaS Havana-2 model INFO [alembic.migration] Running upgrade 39cf3f799352 -> 52ff27f7567a, Support for VPNaaS INFO [alembic.migration] Running upgrade 52ff27f7567a -> 11c6e18605c8, Pool Monitor status field INFO [alembic.migration] Running upgrade 11c6e18605c8 -> 35c7c198ddea, remove status from HealthMonitor INFO [alembic.migration] Running upgrade 35c7c198ddea -> 263772d65691, Cisco plugin db cleanup part II INFO [alembic.migration] Running upgrade 263772d65691 -> c88b6b5fea3, Cisco N1KV tables INFO [alembic.migration] Running upgrade c88b6b5fea3 -> f9263d6df56, remove_dhcp_lease INFO [alembic.migration] Running upgrade f9263d6df56 -> 569e98a8132b, metering INFO [alembic.migration] Running upgrade 569e98a8132b -> 86cf4d88bd3, remove bigswitch port tracking table INFO [alembic.migration] Running upgrade 86cf4d88bd3 -> 3c6e57a23db4, add multiprovider INFO [alembic.migration] Running upgrade 3c6e57a23db4 -> 63afba73813, Add unique constraint for id column of TunnelEndpoint INFO [alembic.migration] Running upgrade 63afba73813 -> 40dffbf4b549, nvp_dist_router INFO [alembic.migration] Running upgrade 40dffbf4b549 -> 53bbd27ec841, Extra dhcp opts support INFO [alembic.migration] Running upgrade 53bbd27ec841 -> 46a0efbd8f0, cisco_n1kv_multisegment_trunk INFO [alembic.migration] Running upgrade 46a0efbd8f0 -> 2a3bae1ceb8, NEC Port Binding INFO [alembic.migration] Running upgrade 2a3bae1ceb8 -> 14f24494ca31, DB Migration for Arista ml2 mechanism driver INFO [alembic.migration] Running upgrade 14f24494ca31 -> 32a65f71af51, ml2 portbinding INFO [alembic.migration] Running upgrade 32a65f71af51 -> 66a59a7f516, NEC OpenFlow Router INFO [alembic.migration] Running upgrade 66a59a7f516 -> 51b4de912379, Cisco Nexus ML2 mechanism driver INFO [alembic.migration] Running upgrade 51b4de912379 -> 1efb85914233, allowedaddresspairs INFO [alembic.migration] Running upgrade 1efb85914233 -> 38fc1f6789f8, Cisco N1KV overlay support INFO [alembic.migration] Running upgrade 38fc1f6789f8 -> 4a666eb208c2, service router INFO [alembic.migration] Running upgrade 4a666eb208c2 -> 338d7508968c, vpnaas peer_address size increase INFO [alembic.migration] Running upgrade 338d7508968c -> 3ed8f075e38a, nvp fwaas plugin INFO [alembic.migration] Running upgrade 3ed8f075e38a -> 3d6fae8b70b0, nvp lbaas plugin INFO [alembic.migration] Running upgrade 3d6fae8b70b0 -> 1064e98b7917, nec-pf-port-del INFO [alembic.migration] Running upgrade 1064e98b7917 -> 2528ceb28230, NEC PacketFilter network_id nullable fix INFO [alembic.migration] Running upgrade 2528ceb28230 -> 3a520dd165d0, Cisco Nexus multi-switch INFO [alembic.migration] Running upgrade 3a520dd165d0 -> 27ef74513d33, quota_in_plumgrid_plugin INFO [alembic.migration] Running upgrade 27ef74513d33 -> 49f5e553f61f, security_groups INFO [alembic.migration] Running upgrade 49f5e553f61f -> 40b0aff0302e, mlnx_initial INFO [alembic.migration] Running upgrade 40b0aff0302e -> havana, havana INFO [alembic.migration] Running upgrade havana -> e197124d4b9, add unique constraint to members INFO [alembic.migration] Running upgrade e197124d4b9 -> 1fcfc149aca4, Add a unique constraint on (agent_type, host) columns to prevent a 
race condition when an agent entry is 'upserted'. INFO [alembic.migration] Running upgrade 1fcfc149aca4 -> 50e86cb2637a, nsx_mappings INFO [alembic.migration] Running upgrade 50e86cb2637a -> ed93525fd003, bigswitch_quota INFO [alembic.migration] Running upgrade ed93525fd003 -> 8f682276ee4, ryu plugin quota INFO [alembic.migration] Running upgrade 8f682276ee4 -> 1421183d533f, NSX DHCP/metadata support INFO [alembic.migration] Running upgrade 1421183d533f -> 3d3cb89d84ee, nsx_switch_mappings INFO [alembic.migration] Running upgrade 3d3cb89d84ee -> 4ca36cfc898c, nsx_router_mappings INFO [alembic.migration] Running upgrade 4ca36cfc898c -> 27cc183af192, ml2_vnic_type INFO [alembic.migration] Running upgrade 27cc183af192 -> 50d5ba354c23, ml2 binding:vif_details INFO [alembic.migration] Running upgrade 50d5ba354c23 -> 157a5d299379, ml2 binding:profile INFO [alembic.migration] Running upgrade 157a5d299379 -> 3d2585038b95, VMware NSX rebranding INFO [alembic.migration] Running upgrade 3d2585038b95 -> abc88c33f74f, lb stats INFO [alembic.migration] Running upgrade abc88c33f74f -> 1b2580001654, nsx_sec_group_mapping INFO [alembic.migration] Running upgrade 1b2580001654 -> e766b19a3bb, nuage_initial INFO [alembic.migration] Running upgrade e766b19a3bb -> f44ab9871cd6, bsn_security_groups INFO [alembic.migration] Running upgrade f44ab9871cd6 -> 2eeaf963a447, floatingip_status INFO [alembic.migration] Running upgrade 2eeaf963a447 -> fcac4c42e2cc, bsn_addresspairs INFO [alembic.migration] Running upgrade fcac4c42e2cc -> 492a106273f8, Brocade ML2 Mech. Driver INFO [alembic.migration] Running upgrade 492a106273f8 -> 24c7ea5160d7, Cisco CSR VPNaaS INFO [alembic.migration] Running upgrade 24c7ea5160d7 -> 81c553f3776c, bsn_consistencyhashes INFO [alembic.migration] Running upgrade 81c553f3776c -> 117643811bca, nec: delete old ofc mapping tables INFO [alembic.migration] Running upgrade 117643811bca -> 19180cf98af6, nsx_gw_devices INFO [alembic.migration] Running upgrade 19180cf98af6 -> 33dd0a9fa487, embrane_lbaas_driver INFO [alembic.migration] Running upgrade 33dd0a9fa487 -> 2447ad0e9585, Add IPv6 Subnet properties INFO [alembic.migration] Running upgrade 2447ad0e9585 -> 538732fa21e1, NEC Rename quantum_id to neutron_id INFO [alembic.migration] Running upgrade 538732fa21e1 -> 5ac1c354a051, n1kv segment allocs for cisco n1kv plugin INFO [alembic.migration] Running upgrade 5ac1c354a051 -> icehouse, icehouse + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + configure_neutron_third_party + _neutron_third_party_do configure + init_neutron_third_party + _neutron_third_party_do init + start_neutron_third_party + _neutron_third_party_do start + is_service_enabled n-net q-dhcp ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 ++ pidof NetworkManager ++ true + netman_pid= + '[' -z '' ']' + sudo killall dnsmasq dnsmasq: no process found + true + clean_iptables + sudo iptables -S -v + sed 's/-c [0-9]* [0-9]* //g' + grep '\-A' + sed s/-A/-D/g + bash + awk '{print "sudo iptables",$0}' + grep nova + sudo iptables -S -v -t nat + sed 's/-c [0-9]* [0-9]* //g' + grep nova + grep '\-A' + sed s/-A/-D/g + awk '{print "sudo iptables -t nat",$0}' + bash + sudo iptables -S -v + grep '\-N' + sed 's/-c [0-9]* [0-9]* //g' + sed s/-N/-X/g + grep nova + awk '{print "sudo iptables",$0}' + bash + sudo iptables -S -v -t nat + sed 's/-c [0-9]* [0-9]* //g' + grep nova + grep '\-N' + sed s/-N/-X/g + bash + awk '{print "sudo iptables -t nat",$0}' + 
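The alembic stream above ends at the icehouse revision for the freshly recreated neutron_ml2 database. A short sketch for confirming that by hand; the current subcommand and the alembic_version table are standard alembic machinery rather than anything shown in this trace:

    /usr/local/bin/neutron-db-manage --config-file /etc/neutron/neutron.conf \
        --config-file /etc/neutron/plugins/ml2/ml2_conf.ini current
    mysql -uroot -pPassw0rd -h127.0.0.1 neutron_ml2 \
        -e 'SELECT version_num FROM alembic_version;'   # should report the icehouse head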
is_service_enabled n-net ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + sudo sysctl -w net.ipv4.ip_forward=1 + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Swift' + [[ -t 3 ]] + echo -e Configuring Swift + init_swift + local node_number + swift-init --run-dir=/opt/stack/data/swift/run all stop + true + create_swift_disk + local node_number ++ id -g cloudbase + USER_GROUP=1000 + sudo mkdir -p /opt/stack/data/swift/drives /opt/stack/data/swift/cache /opt/stack/data/swift/run /opt/stack/data/swift/logs + sudo chown -R cloudbase:1000 /opt/stack/data/swift + [[ -e /opt/stack/data/swift/drives/images/swift.img ]] + mkdir -p /opt/stack/data/swift/drives/images + sudo touch /opt/stack/data/swift/drives/images/swift.img + sudo chown cloudbase: /opt/stack/data/swift/drives/images/swift.img + truncate -s 6G /opt/stack/data/swift/drives/images/swift.img + /sbin/mkfs.xfs -f -i size=1024 /opt/stack/data/swift/drives/images/swift.img + mkdir -p /opt/stack/data/swift/drives/sdb1 + egrep -q /opt/stack/data/swift/drives/sdb1 /proc/mounts + sudo mount -t xfs -o loop,noatime,nodiratime,nobarrier,logbufs=8 /opt/stack/data/swift/drives/images/swift.img /opt/stack/data/swift/drives/sdb1 + for node_number in '${SWIFT_REPLICAS_SEQ}' + sudo ln -sf /opt/stack/data/swift/drives/sdb1/1 /opt/stack/data/swift/1 + drive=/opt/stack/data/swift/drives/sdb1/1 + node=/opt/stack/data/swift/1/node + node_device=/opt/stack/data/swift/1/node/sdb1 + [[ -d /opt/stack/data/swift/1/node ]] + [[ -d /opt/stack/data/swift/drives/sdb1/1 ]] + sudo install -o cloudbase -g 1000 -d /opt/stack/data/swift/drives/sdb1/1 + sudo install -o cloudbase -g 1000 -d /opt/stack/data/swift/1/node/sdb1 + sudo chown -R cloudbase: /opt/stack/data/swift/1/node + pushd /etc/swift + rm -f account.builder container.builder object.builder account.ring.gz container.ring.gz object.ring.gz backups/1413066754.account.builder backups/1413066754.container.builder backups/1413066754.object.builder backups/1413066755.account.builder backups/1413066755.container.builder backups/1413066754.object.ring.gz backups/1413066755.account.ring.gz backups/1413066755.container.ring.gz + swift-ring-builder object.builder create 9 1 1 + swift-ring-builder container.builder create 9 1 1 + swift-ring-builder account.builder create 9 1 1 + for node_number in '${SWIFT_REPLICAS_SEQ}' + swift-ring-builder object.builder add z1-127.0.0.1:6013/sdb1 1 WARNING: No region specified for z1-127.0.0.1:6013/sdb1. Defaulting to region 1. + swift-ring-builder container.builder add z1-127.0.0.1:6011/sdb1 1 WARNING: No region specified for z1-127.0.0.1:6011/sdb1. Defaulting to region 1. + swift-ring-builder account.builder add z1-127.0.0.1:6012/sdb1 1 WARNING: No region specified for z1-127.0.0.1:6012/sdb1. Defaulting to region 1. 
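The earlier create 9 1 1 calls set a partition power of 9, a single replica, and a one-hour min_part_hours, and the three WARNING lines are only about the missing region in the z1-... device strings, which the builder defaults to region 1. A sketch of the same add step with the region spelled out (device strings and weights mirror the trace; the rebalance that follows below is unchanged):

    cd /etc/swift
    swift-ring-builder object.builder    add r1z1-127.0.0.1:6013/sdb1 1   # region 1, zone 1, weight 1
    swift-ring-builder container.builder add r1z1-127.0.0.1:6011/sdb1 1
    swift-ring-builder account.builder   add r1z1-127.0.0.1:6012/sdb1 1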
+ swift-ring-builder object.builder rebalance + swift-ring-builder container.builder rebalance + swift-ring-builder account.builder rebalance + popd + sudo mkdir -p /var/cache/swift + sudo chown cloudbase /var/cache/swift + rm -f /var/cache/swift/cacert.pem /var/cache/swift/revoked.pem /var/cache/swift/signing_cert.pem + is_service_enabled cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Cinder' + [[ -t 3 ]] + echo -e Configuring Cinder + init_cinder ++ echo ec2,osapi_compute,metadata ++ sed s/osapi_volume,// + NOVA_ENABLED_APIS=ec2,osapi_compute,metadata + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + recreate_database cinder utf8 + local db=cinder + local charset=utf8 + recreate_database_mysql cinder utf8 + local db=cinder + local charset=utf8 + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'DROP DATABASE IF EXISTS cinder;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE cinder CHARACTER SET utf8;' + /opt/stack/cinder/bin/cinder-manage db sync 2014-10-12 02:36:39.825 17111 DEBUG cinder.utils [-] backend __get_backend /opt/stack/cinder/cinder/utils.py:424 2014-10-12 02:36:39.826 17111 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:36:39.826 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/001_cinder_init.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.826 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/001_cinder_init.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.826 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/002_quota_class.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.826 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/002_quota_class.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/003_glance_metadata.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/003_glance_metadata.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/004_volume_type_to_uuid.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/004_volume_type_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/005_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/005_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/005_add_source_volume_column.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/005_add_source_volume_column.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/006_snapshots_add_provider_location.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/007_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/007_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/007_add_volume_snapshot_fk.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/007_add_volume_snapshot_fk.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/008_add_backup.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.827 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/008_add_backup.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/009_add_snapshot_metadata_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/009_add_snapshot_metadata_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/010_add_transfers_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/010_add_transfers_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/011_add_bootable_column.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/011_add_bootable_column.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/012_add_attach_host_column.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/012_add_attach_host_column.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/012_sqlite_downgrade.sql... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/012_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/013_add_provider_geometry_column.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.828 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/013_add_provider_geometry_column.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/014_add_name_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/014_add_name_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/014_sqlite_downgrade.sql... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/014_sqlite_downgrade.sql loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/015_drop_migrations_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/015_drop_migrations_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/016_drop_sm_tables.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/016_drop_sm_tables.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/017_add_encryption_information.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/017_add_encryption_information.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/018_add_qos_specs.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/018_add_qos_specs.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/019_add_migration_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/019_add_migration_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/020_add_volume_admin_metadata_table.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/020_add_volume_admin_metadata_table.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/021_add_default_quota_class.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.829 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/021_add_default_quota_class.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.830 17111 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/022_add_reason_column_to_service.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:39.830 17111 DEBUG migrate.versioning.script.base [-] Script /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo/versions/022_add_reason_column_to_service.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:39.830 17111 DEBUG migrate.versioning.repository [-] Repository /opt/stack/cinder/cinder/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:36:39.830 17111 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'cinder'), ('version_table', 'migrate_version'), ('required_dbs', '[]')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:36:39.896 17111 INFO migrate.versioning.api [-] 0 -> 1... 2014-10-12 02:36:40.907 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:40.908 17111 INFO migrate.versioning.api [-] 1 -> 2... 2014-10-12 02:36:41.109 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.109 17111 INFO migrate.versioning.api [-] 2 -> 3... 2014-10-12 02:36:41.165 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.166 17111 INFO migrate.versioning.api [-] 3 -> 4... 2014-10-12 02:36:41.445 17111 INFO 004_volume_type_to_uuid [-] Created foreign key volume_type_extra_specs_ibfk_1 2014-10-12 02:36:41.462 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.462 17111 INFO migrate.versioning.api [-] 4 -> 5... 2014-10-12 02:36:41.540 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.541 17111 INFO migrate.versioning.api [-] 5 -> 6... 2014-10-12 02:36:41.618 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.618 17111 INFO migrate.versioning.api [-] 6 -> 7... 2014-10-12 02:36:41.691 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.692 17111 INFO migrate.versioning.api [-] 7 -> 8... 2014-10-12 02:36:41.732 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.732 17111 INFO migrate.versioning.api [-] 8 -> 9... 2014-10-12 02:36:41.777 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.777 17111 INFO migrate.versioning.api [-] 9 -> 10... 2014-10-12 02:36:41.823 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.823 17111 INFO migrate.versioning.api [-] 10 -> 11... 2014-10-12 02:36:41.911 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.911 17111 INFO migrate.versioning.api [-] 11 -> 12... 2014-10-12 02:36:41.992 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:41.992 17111 INFO migrate.versioning.api [-] 12 -> 13... 2014-10-12 02:36:42.059 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.060 17111 INFO migrate.versioning.api [-] 13 -> 14... 2014-10-12 02:36:42.139 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.139 17111 INFO migrate.versioning.api [-] 14 -> 15... 2014-10-12 02:36:42.168 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.168 17111 INFO migrate.versioning.api [-] 15 -> 16... 2014-10-12 02:36:42.242 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.242 17111 INFO migrate.versioning.api [-] 16 -> 17... 2014-10-12 02:36:42.443 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.443 17111 INFO migrate.versioning.api [-] 17 -> 18... 
2014-10-12 02:36:42.579 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.579 17111 INFO migrate.versioning.api [-] 18 -> 19... 2014-10-12 02:36:42.648 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.648 17111 INFO migrate.versioning.api [-] 19 -> 20... 2014-10-12 02:36:42.697 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.697 17111 INFO migrate.versioning.api [-] 20 -> 21... 2014-10-12 02:36:42.738 17111 INFO 021_add_default_quota_class [-] Added default quota class data into the DB. 2014-10-12 02:36:42.758 17111 INFO migrate.versioning.api [-] done 2014-10-12 02:36:42.758 17111 INFO migrate.versioning.api [-] 21 -> 22... 2014-10-12 02:36:42.829 17111 INFO migrate.versioning.api [-] done + is_service_enabled c-vol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + create_cinder_volume_group + sudo vgs stack-volumes + '[' False = True ']' + mkdir -p /opt/stack/data/cinder/volumes + sudo vgs stack-volumes + is_fedora + [[ -z Ubuntu ]] + '[' Ubuntu = Fedora ']' + '[' Ubuntu = 'Red Hat' ']' + '[' Ubuntu = CentOS ']' + is_suse + [[ -z Ubuntu ]] + '[' Ubuntu = openSUSE ']' + '[' Ubuntu = 'SUSE LINUX' ']' + sudo tgtadm --op show --mode target + grep volume- + grep Target + sudo xargs -n1 tgt-admin --delete + cut -f3 -d ' ' tgtadm: can't send the request to the tgt daemon, Transport endpoint is not connected Option delete requires an argument + _clean_lvm_lv stack-volumes volume- + local vg=stack-volumes + local lv_prefix=volume- ++ sudo lvs --noheadings -o lv_name stack-volumes + '[' False = True ']' + create_cinder_cache_dir + sudo mkdir -p /var/cache/cinder + sudo chown cloudbase /var/cache/cinder + rm -f /var/cache/cinder/cacert.pem /var/cache/cinder/revoked.pem /var/cache/cinder/signing_cert.pem + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Nova' + [[ -t 3 ]] + echo -e Configuring Nova + init_nova + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + recreate_database nova latin1 + local db=nova + local charset=latin1 + recreate_database_mysql nova latin1 + local db=nova + local charset=latin1 + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'DROP DATABASE IF EXISTS nova;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE nova CHARACTER SET latin1;' + /usr/local/bin/nova-manage db sync 2014-10-12 02:36:44.767 17155 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/nova/nova/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/224_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/224_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/225_placeholder.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/225_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/226_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/226_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/227_fix_project_user_quotas_resource_length.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/227_fix_project_user_quotas_resource_length.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.768 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/228_add_metrics_in_compute_nodes.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/228_add_metrics_in_compute_nodes.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/229_add_extra_resources_in_compute_nodes.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/229_add_extra_resources_in_compute_nodes.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/230_add_details_column_to_instance_actions_events.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/230_add_details_column_to_instance_actions_events.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/231_add_ephemeral_key_uuid.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/231_add_ephemeral_key_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/232_drop_dump_tables.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/232_drop_dump_tables.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/233_add_stats_in_compute_nodes.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/233_add_stats_in_compute_nodes.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/216_havana.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/216_havana.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/217_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.769 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/217_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/218_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/218_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/219_placeholder.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/219_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/220_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/220_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/221_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/221_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/222_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/222_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/223_placeholder.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.script.base [-] Script /opt/stack/nova/nova/db/sqlalchemy/migrate_repo/versions/223_placeholder.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.repository [-] Repository /opt/stack/nova/nova/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:36:44.770 17155 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'nova'), ('version_table', 'migrate_version'), ('required_dbs', '[]')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:36:44.834 17155 INFO migrate.versioning.api [-] 215 -> 216... 2014-10-12 02:36:52.638 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.638 17155 INFO migrate.versioning.api [-] 216 -> 217... 2014-10-12 02:36:52.651 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.651 17155 INFO migrate.versioning.api [-] 217 -> 218... 2014-10-12 02:36:52.667 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.667 17155 INFO migrate.versioning.api [-] 218 -> 219... 
2014-10-12 02:36:52.679 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.680 17155 INFO migrate.versioning.api [-] 219 -> 220... 2014-10-12 02:36:52.690 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.690 17155 INFO migrate.versioning.api [-] 220 -> 221... 2014-10-12 02:36:52.704 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.704 17155 INFO migrate.versioning.api [-] 221 -> 222... 2014-10-12 02:36:52.719 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.719 17155 INFO migrate.versioning.api [-] 222 -> 223... 2014-10-12 02:36:52.732 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.733 17155 INFO migrate.versioning.api [-] 223 -> 224... 2014-10-12 02:36:52.745 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.745 17155 INFO migrate.versioning.api [-] 224 -> 225... 2014-10-12 02:36:52.758 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.758 17155 INFO migrate.versioning.api [-] 225 -> 226... 2014-10-12 02:36:52.774 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.774 17155 INFO migrate.versioning.api [-] 226 -> 227... 2014-10-12 02:36:52.803 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.803 17155 INFO migrate.versioning.api [-] 227 -> 228... 2014-10-12 02:36:52.921 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:52.921 17155 INFO migrate.versioning.api [-] 228 -> 229... 2014-10-12 02:36:53.046 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:53.047 17155 INFO migrate.versioning.api [-] 229 -> 230... 2014-10-12 02:36:53.271 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:53.271 17155 INFO migrate.versioning.api [-] 230 -> 231... 2014-10-12 02:36:53.401 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:53.401 17155 INFO migrate.versioning.api [-] 231 -> 232... 2014-10-12 02:36:53.660 17155 INFO migrate.versioning.api [-] done 2014-10-12 02:36:53.660 17155 INFO migrate.versioning.api [-] 232 -> 233... 
2014-10-12 02:36:53.829 17155 INFO migrate.versioning.api [-] done + is_service_enabled n-cell ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_baremetal + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ baremetal ]] + return 1 + create_nova_cache_dir + sudo mkdir -p /var/cache/nova + sudo chown cloudbase /var/cache/nova + rm -f '/var/cache/nova/*' + create_nova_keys_dir + sudo mkdir -p /opt/stack/data/nova/keys + sudo chown -R cloudbase /opt/stack/data/nova + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + create_nova_conf_neutron + iniset /etc/nova/nova.conf DEFAULT network_api_class nova.network.neutronv2.api.API ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_admin_username neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_admin_password Passw0rd ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_admin_auth_url http://10.14.0.26:35357/v2.0 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_auth_strategy keystone ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_admin_tenant_name service ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_region_name RegionOne ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT neutron_url http://10.14.0.26:9696 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + [[ True == \T\r\u\e ]] + LIBVIRT_FIREWALL_DRIVER=nova.virt.firewall.NoopFirewallDriver + iniset /etc/nova/nova.conf DEFAULT firewall_driver nova.virt.firewall.NoopFirewallDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT security_group_api neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + neutron_plugin_create_nova_conf + _neutron_ovs_base_configure_nova_vif_driver + : + '[' libvirt = xenserver ']' + iniset /etc/nova/nova.conf DEFAULT libvirt_vif_driver nova.virt.libvirt.vif.LibvirtGenericVIFDriver ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT linuxnet_interface_driver '' ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled q-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + iniset /etc/nova/nova.conf DEFAULT service_neutron_metadata_proxy True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT vif_plugging_is_fatal True ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + iniset /etc/nova/nova.conf DEFAULT vif_plugging_timeout 300 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + init_nova_cells + is_service_enabled n-cell ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + 
is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_baremetal + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ baremetal ]] + return 1 + [[ -d /home/cloudbase/devstack/extras.d ]] + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/50-ironic.sh ]] + source /home/cloudbase/devstack/extras.d/50-ironic.sh stack post-config ++ is_service_enabled ir-api ir-cond +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-gantt.sh ]] + source /home/cloudbase/devstack/extras.d/70-gantt.sh stack post-config ++ is_service_enabled n-sch +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ disable_service gantt ++ local tmpsvcs=,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, ++ local service ++ for service in '$@' ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 +++ _cleanup_service_list ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ echo ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ sed -e ' s/,,/,/g; s/^,//; s/,$// ' ++ ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-marconi.sh ]] + source /home/cloudbase/devstack/extras.d/70-marconi.sh stack post-config ++ is_service_enabled marconi-server +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-sahara.sh ]] + source /home/cloudbase/devstack/extras.d/70-sahara.sh stack post-config ++ is_service_enabled sahara +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-trove.sh ]] + source /home/cloudbase/devstack/extras.d/70-trove.sh stack post-config ++ is_service_enabled trove +++ set +o +++ grep xtrace ++ local 
'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-opendaylight.sh ]] + source /home/cloudbase/devstack/extras.d/80-opendaylight.sh stack post-config ++ is_service_enabled odl-server odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-server +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-tempest.sh ]] + source /home/cloudbase/devstack/extras.d/80-tempest.sh stack post-config ++ is_service_enabled tempest +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ [[ stack == \s\o\u\r\c\e ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ post-config == \i\n\s\t\a\l\l ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ post-config == \p\o\s\t\-\c\o\n\f\i\g ]] ++ create_tempest_accounts ++ is_service_enabled tempest +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ openstack project create alt_demo ++ openstack user create --project alt_demo --password Passw0rd alt_demo ++ openstack role add --project alt_demo --user alt_demo Member ++ [[ stack == \u\n\s\t\a\c\k ]] ++ [[ stack == \c\l\e\a\n ]] + merge_config_group /home/cloudbase/devstack/local.conf post-config + local localfile=/home/cloudbase/devstack/local.conf + shift + local matchgroups=post-config + [[ -r /home/cloudbase/devstack/local.conf ]] + for group in '$matchgroups' ++ get_meta_section_files /home/cloudbase/devstack/local.conf post-config ++ local file=/home/cloudbase/devstack/local.conf ++ local matchgroup=post-config ++ [[ -r /home/cloudbase/devstack/local.conf ]] ++ awk -v matchgroup=post-config ' /^\[\[.+\|.*\]\]/ { gsub("[][]", "", $1); split($1, a, "|"); if (a[1] == matchgroup) print a[2] } ' /home/cloudbase/devstack/local.conf + for configfile in '$(get_meta_section_files $localfile $group)' +++ eval 'echo $NOVA_CONF' ++++ echo /etc/nova/nova.conf ++ dirname /etc/nova/nova.conf + [[ -d /etc/nova ]] + merge_config_file /home/cloudbase/devstack/local.conf post-config '$NOVA_CONF' + local file=/home/cloudbase/devstack/local.conf + local matchgroup=post-config + local 'configfile=$NOVA_CONF' + get_meta_section /home/cloudbase/devstack/local.conf post-config '$NOVA_CONF' + awk -v 'configfile=$NOVA_CONF' ' BEGIN { section = "" } /^\[.+\]/ { gsub("[][]", "", $1); section=$1 next } /^ *\#/ { next } /^[^ \t]+/ { split($0, d, " *= *") print "iniset " configfile " " section " " d[1] " \"" d[2] "\"" } ' + local file=/home/cloudbase/devstack/local.conf + local matchgroup=post-config + local 'configfile=$NOVA_CONF' + [[ -r /home/cloudbase/devstack/local.conf ]] + [[ -z $NOVA_CONF ]] + awk -v matchgroup=post-config -v 'configfile=$NOVA_CONF' ' BEGIN { group = "" } /^\[\[.+\|.*\]\]/ { if (group == "") { gsub("[][]", "", $1); split($1, a, "|"); if (a[1] == matchgroup && a[2] == configfile) { group=a[1] } } else { group="" } next } { if (group != "") print $0 } ' /home/cloudbase/devstack/local.conf + read a + eval 'iniset $NOVA_CONF DEFAULT allow_resize_to_same_host "$DEVSTACK_SAME_HOST_RESIZE"' ++ iniset /etc/nova/nova.conf DEFAULT allow_resize_to_same_host True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + read a + for configfile in '$(get_meta_section_files $localfile 
$group)' +++ eval 'echo $NEUTRON_CONF' ++++ echo /etc/neutron/neutron.conf ++ dirname /etc/neutron/neutron.conf + [[ -d /etc/neutron ]] + merge_config_file /home/cloudbase/devstack/local.conf post-config '$NEUTRON_CONF' + local file=/home/cloudbase/devstack/local.conf + local matchgroup=post-config + local 'configfile=$NEUTRON_CONF' + get_meta_section /home/cloudbase/devstack/local.conf post-config '$NEUTRON_CONF' + awk -v 'configfile=$NEUTRON_CONF' ' BEGIN { section = "" } /^\[.+\]/ { gsub("[][]", "", $1); section=$1 next } /^ *\#/ { next } /^[^ \t]+/ { split($0, d, " *= *") print "iniset " configfile " " section " " d[1] " \"" d[2] "\"" } ' + local file=/home/cloudbase/devstack/local.conf + local matchgroup=post-config + local 'configfile=$NEUTRON_CONF' + [[ -r /home/cloudbase/devstack/local.conf ]] + [[ -z $NEUTRON_CONF ]] + awk -v matchgroup=post-config -v 'configfile=$NEUTRON_CONF' ' BEGIN { group = "" } /^\[\[.+\|.*\]\]/ { if (group == "") { gsub("[][]", "", $1); split($1, a, "|"); if (a[1] == matchgroup && a[2] == configfile) { group=a[1] } } else { group="" } next } { if (group != "") print $0 } ' /home/cloudbase/devstack/local.conf + read a + eval 'iniset $NEUTRON_CONF database min_pool_size "5"' ++ iniset /etc/neutron/neutron.conf database min_pool_size 5 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + read a + eval 'iniset $NEUTRON_CONF database max_pool_size "50"' ++ iniset /etc/neutron/neutron.conf database max_pool_size 50 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + read a + eval 'iniset $NEUTRON_CONF database max_overflow "50"' ++ iniset /etc/neutron/neutron.conf database max_overflow 50 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + read a + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Swift' + [[ -t 3 ]] + echo -e Starting Swift + start_swift + restart_service memcached + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo /usr/sbin/service memcached restart + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo /etc/init.d/rsync restart + is_apache_enabled_service swift + services=swift + for service in '${services}' + [[ ,, =~ ,swift, ]] + return 1 + swift-init --run-dir=/opt/stack/data/swift/run all restart + true + [[ 1 == 1 ]] + todo='object container account' + for type in proxy '${todo}' + swift-init --run-dir=/opt/stack/data/swift/run proxy stop + for type in proxy '${todo}' + swift-init --run-dir=/opt/stack/data/swift/run object stop + for type in proxy '${todo}' + swift-init --run-dir=/opt/stack/data/swift/run container stop + for type in proxy '${todo}' + swift-init --run-dir=/opt/stack/data/swift/run account stop + run_process s-proxy '/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v' + local service=s-proxy + local 'command=/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v' + local group= + is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service s-proxy '/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v' '' + local service=s-proxy + local 'command=/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + 
is_service_enabled s-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc s-proxy '/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep s-proxy /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t s-proxy bash' + echo 'stuff "/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-proxy.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t s-proxy + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p s-proxy -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-proxy.2014-10-12-023444.log + screen -S stack -p s-proxy -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-proxy.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-proxy.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p s-proxy -X stuff '/opt/stack/swift/bin/swift-proxy-server /etc/swift/proxy-server.conf -v & echo $! >/opt/stack/status/stack/s-proxy.pid; fg || echo "s-proxy failed to start" | tee "/opt/stack/status/stack/s-proxy.failure" ' + [[ 1 == 1 ]] + for type in object container account + run_process s-object '/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v' + local service=s-object + local 'command=/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v' + local group= + is_service_enabled s-object ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service s-object '/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v' '' + local service=s-object + local 'command=/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled s-object ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc s-object '/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep s-object /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t s-object bash' + echo 'stuff "/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-object.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t s-object + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p s-object -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-object.2014-10-12-023444.log + screen -S stack -p s-object -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-object.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-object.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p s-object -X stuff '/opt/stack/swift/bin/swift-object-server /etc/swift/object-server/1.conf -v & echo $! >/opt/stack/status/stack/s-object.pid; fg || echo "s-object failed to start" | tee "/opt/stack/status/stack/s-object.failure" ' + for type in object container account + run_process s-container '/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v' + local service=s-container + local 'command=/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v' + local group= + is_service_enabled s-container ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service s-container '/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v' '' + local service=s-container + local 'command=/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled s-container ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc s-container '/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep s-container /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t s-container bash' + echo 'stuff "/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-container.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t s-container + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p s-container -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-container.2014-10-12-023444.log + screen -S stack -p s-container -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-container.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-container.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p s-container -X stuff '/opt/stack/swift/bin/swift-container-server /etc/swift/container-server/1.conf -v & echo $! >/opt/stack/status/stack/s-container.pid; fg || echo "s-container failed to start" | tee "/opt/stack/status/stack/s-container.failure" ' + for type in object container account + run_process s-account '/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v' + local service=s-account + local 'command=/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v' + local group= + is_service_enabled s-account ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service s-account '/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v' '' + local service=s-account + local 'command=/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled s-account ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc s-account '/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep s-account /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t s-account bash' + echo 'stuff "/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-account.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t s-account + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p s-account -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-account.2014-10-12-023444.log + screen -S stack -p s-account -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-account.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-s-account.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p s-account -X stuff '/opt/stack/swift/bin/swift-account-server /etc/swift/account-server/1.conf -v & echo $! >/opt/stack/status/stack/s-account.pid; fg || echo "s-account failed to start" | tee "/opt/stack/status/stack/s-account.failure" ' + is_service_enabled glance ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Glance' + [[ -t 3 ]] + echo -e Starting Glance + start_glance + run_process g-reg '/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf' + local service=g-reg + local 'command=/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf' + local group= + is_service_enabled g-reg ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service g-reg '/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf' '' + local service=g-reg + local 'command=/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled g-reg ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc g-reg '/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep g-reg /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t g-reg bash' + echo 'stuff "/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-reg.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t g-reg + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p g-reg -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-reg.2014-10-12-023444.log + screen -S stack -p g-reg -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-reg.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-reg.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p g-reg -X stuff '/usr/local/bin/glance-registry --config-file=/etc/glance/glance-registry.conf & echo $! >/opt/stack/status/stack/g-reg.pid; fg || echo "g-reg failed to start" | tee "/opt/stack/status/stack/g-reg.failure" ' + run_process g-api '/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf' + local service=g-api + local 'command=/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf' + local group= + is_service_enabled g-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service g-api '/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf' '' + local service=g-api + local 'command=/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled g-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc g-api '/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep g-api /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t g-api bash' + echo 'stuff "/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-api.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t g-api + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p g-api -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-api.2014-10-12-023444.log + screen -S stack -p g-api -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-api.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-g-api.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p g-api -X stuff '/usr/local/bin/glance-api --config-file=/etc/glance/glance-api.conf & echo $! >/opt/stack/status/stack/g-api.pid; fg || echo "g-api failed to start" | tee "/opt/stack/status/stack/g-api.failure" ' + echo 'Waiting for g-api (10.14.0.26:9292) to start...' + timeout 60 sh -c 'while ! wget --no-proxy -q -O- http://10.14.0.26:9292; do sleep 1; done' + is_service_enabled g-reg ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 ++ keystone token-get ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ echo '| id | 
MIITtgYJKoZIhvcNAQcCoIITpzCCE6MCAQExCTAHBgUrDgMCGjCCEgwGCSqGSIb3DQEHAaCCEf0EghH5eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0xMC0xMVQyMzozNzoxOS44NjgxMDciLCAiZXhwaXJlcyI6ICIyMDE0LTEwLTEyVDAwOjM3OjE5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92Mi83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiNmZmNzljZWZiOTQ5NDBhYzkwNmVhODg3YzdmYzRkNjQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojk2OTYvIiwgImlkIjogIjkzZjg2ZTYxNDdjNzQ3NGVhZjc5ODczYjI3MjAxMmI5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJuZXR3b3JrIiwgIm5hbWUiOiAibmV1dHJvbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzYvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgImlkIjogIjliZjEyYThjZGU2NjQ4ODNiM2RiMTE0MjY3MDM3ZTRjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZXYyIiwgIm5hbWUiOiAiY2luZGVydjIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc0L3YzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyIsICJpZCI6ICI0MTNmMTFhMjNjMjI0OWI4OTMyYjA5NzVhYmYwNjkyOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjb21wdXRldjMiLCAibmFtZSI6ICJub3ZhdjMifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjozMzMzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyIsICJpZCI6ICI5ZjU0YTZjYzU0MDQ0ZDk5YjI3MDk4MGM3OGZhOTNmZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6OTI5MiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIiLCAiaWQiOiAiMTY4Yjg0ZjU5YWUyNGEyYjk4MTg4YjMyMmMwOWJjNGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzcvIiwgImlkIjogIjFjZWIxMzIyYWM0ODRjMGM4OGE2OTcyNGEzN2M0YTg4IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJtZXRlcmluZyIsICJuYW1lIjogImNlaWxvbWV0ZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDAwL3YxIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSIsICJpZCI6ICIxMzU2ZWU4YmI5NDQ0OGRiYjc4NGNmMmIzZjYyNDRkNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1hdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJ
hZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICIxZGM2NjllZDA5Nzk0MWEzYWI0ODU2ZDRlZjNhZjQyNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwMjZkMjNkZDAxNmQ0ZDFjOWFlNmUyMTAxYzE0NGQxNiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDA0L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICI4MWQ5ODYxOTZmMDA0NTZkYWE5NTgyNjlmYmFjZmE2OSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvcmNoZXN0cmF0aW9uIiwgIm5hbWUiOiAiaGVhdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDgwL3YxL0FVVEhfNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiMTE5MGFhZTI0NmY3NDhhMmE4YmY4ZjYyM2ZkMzZkOTkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAvdjEvQVVUSF83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvYmplY3Qtc3RvcmUiLCAibmFtZSI6ICJzd2lmdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo1MDAwL3YyLjAiLCAiaWQiOiAiMTRiYWY3Y2NkZjE5NGU5ZjhjOGMwYjBmNWYxMDViMzQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjUwMDAvdjIuMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJpZGVudGl0eSIsICJuYW1lIjogImtleXN0b25lIn1dLCAidXNlciI6IHsidXNlcm5hbWUiOiAiYWRtaW4iLCAicm9sZXNfbGlua3MiOiBbXSwgImlkIjogIjg2ODI3OWRlMzE5NzRjYjRiMzBkMGUzNGJhY2E1MTVmIiwgInJvbGVzIjogW3sibmFtZSI6ICJhZG1pbiJ9LCB7Im5hbWUiOiAiaGVhdF9zdGFja19vd25lciJ9LCB7Im5hbWUiOiAiX21lbWJlcl8ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjI1OTVhMTdjMGUwMjRiMjk4NmNjNzAxMWI4MTEzNDUzIiwgImU5ZTM4ODY1NzIxMTQ5MWRhZmQ1ODA5NDJkZTdlZTc2IiwgIjlmZTJmZjllZTQzODRiMTg5NGE5MDg3OGQzZTkyYmFiIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEALvqt4wrcutZpP8GfS6NhGmIAyyGH3P2ZkxaO3xzaz6Hxv0WRB99PfeTQw+zlkGfXccisOGG+A3BZuhlgSTCjYxLU+DYknJOLwCHKHORdpS0b+-Y-imJLzgD4JxWQb0aVWPzFjEhReusDtnYqSeP5fk6NY0U40yBWJHXIrVpmDUi6XU3jAuSMaX2b0W6XsuZpiFFMXQjIvxn0mkL+PclijxIivtzxpgjQX89ajqhXRBWucpL6lUoyxOrDUcqbLF6EBym0q9p8LijHVczv-Uf7B90rHXlNouvx7zLZnqQqQi9GFrIHlbPReTIujZD0S8sB1mFUNzmljrFIbdmAJTBm2g== |' ++ read data + 
TOKEN=MIITtgYJKoZIhvcNAQcCoIITpzCCE6MCAQExCTAHBgUrDgMCGjCCEgwGCSqGSIb3DQEHAaCCEf0EghH5eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0xMC0xMVQyMzozNzoxOS44NjgxMDciLCAiZXhwaXJlcyI6ICIyMDE0LTEwLTEyVDAwOjM3OjE5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92Mi83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiNmZmNzljZWZiOTQ5NDBhYzkwNmVhODg3YzdmYzRkNjQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojk2OTYvIiwgImlkIjogIjkzZjg2ZTYxNDdjNzQ3NGVhZjc5ODczYjI3MjAxMmI5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJuZXR3b3JrIiwgIm5hbWUiOiAibmV1dHJvbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzYvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgImlkIjogIjliZjEyYThjZGU2NjQ4ODNiM2RiMTE0MjY3MDM3ZTRjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZXYyIiwgIm5hbWUiOiAiY2luZGVydjIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc0L3YzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyIsICJpZCI6ICI0MTNmMTFhMjNjMjI0OWI4OTMyYjA5NzVhYmYwNjkyOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjb21wdXRldjMiLCAibmFtZSI6ICJub3ZhdjMifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjozMzMzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyIsICJpZCI6ICI5ZjU0YTZjYzU0MDQ0ZDk5YjI3MDk4MGM3OGZhOTNmZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6OTI5MiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIiLCAiaWQiOiAiMTY4Yjg0ZjU5YWUyNGEyYjk4MTg4YjMyMmMwOWJjNGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzcvIiwgImlkIjogIjFjZWIxMzIyYWM0ODRjMGM4OGE2OTcyNGEzN2M0YTg4IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJtZXRlcmluZyIsICJuYW1lIjogImNlaWxvbWV0ZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDAwL3YxIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSIsICJpZCI6ICIxMzU2ZWU4YmI5NDQ0OGRiYjc4NGNmMmIzZjYyNDRkNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1hdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiO
iBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICIxZGM2NjllZDA5Nzk0MWEzYWI0ODU2ZDRlZjNhZjQyNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwMjZkMjNkZDAxNmQ0ZDFjOWFlNmUyMTAxYzE0NGQxNiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDA0L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICI4MWQ5ODYxOTZmMDA0NTZkYWE5NTgyNjlmYmFjZmE2OSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvcmNoZXN0cmF0aW9uIiwgIm5hbWUiOiAiaGVhdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDgwL3YxL0FVVEhfNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiMTE5MGFhZTI0NmY3NDhhMmE4YmY4ZjYyM2ZkMzZkOTkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAvdjEvQVVUSF83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvYmplY3Qtc3RvcmUiLCAibmFtZSI6ICJzd2lmdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo1MDAwL3YyLjAiLCAiaWQiOiAiMTRiYWY3Y2NkZjE5NGU5ZjhjOGMwYjBmNWYxMDViMzQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjUwMDAvdjIuMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJpZGVudGl0eSIsICJuYW1lIjogImtleXN0b25lIn1dLCAidXNlciI6IHsidXNlcm5hbWUiOiAiYWRtaW4iLCAicm9sZXNfbGlua3MiOiBbXSwgImlkIjogIjg2ODI3OWRlMzE5NzRjYjRiMzBkMGUzNGJhY2E1MTVmIiwgInJvbGVzIjogW3sibmFtZSI6ICJhZG1pbiJ9LCB7Im5hbWUiOiAiaGVhdF9zdGFja19vd25lciJ9LCB7Im5hbWUiOiAiX21lbWJlcl8ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjI1OTVhMTdjMGUwMjRiMjk4NmNjNzAxMWI4MTEzNDUzIiwgImU5ZTM4ODY1NzIxMTQ5MWRhZmQ1ODA5NDJkZTdlZTc2IiwgIjlmZTJmZjllZTQzODRiMTg5NGE5MDg3OGQzZTkyYmFiIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEALvqt4wrcutZpP8GfS6NhGmIAyyGH3P2ZkxaO3xzaz6Hxv0WRB99PfeTQw+zlkGfXccisOGG+A3BZuhlgSTCjYxLU+DYknJOLwCHKHORdpS0b+-Y-imJLzgD4JxWQb0aVWPzFjEhReusDtnYqSeP5fk6NY0U40yBWJHXIrVpmDUi6XU3jAuSMaX2b0W6XsuZpiFFMXQjIvxn0mkL+PclijxIivtzxpgjQX89ajqhXRBWucpL6lUoyxOrDUcqbLF6EBym0q9p8LijHVczv-Uf7B90rHXlNouvx7zLZnqQqQi9GFrIHlbPReTIujZD0S8sB1mFUNzmljrFIbdmAJTBm2g== + die_if_not_set 1108 TOKEN 'Keystone fail to get token' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + is_baremetal + [[ 
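[Editor's note] The large base64 blob above is the PKI token that `keystone token-get` prints in its table output; the trace scrapes it by grepping the "| id |" row and letting get_field's awk split on the "|" separators (field N of the table is awk column N+1, because the leading "|" produces an empty first column), after which die_if_not_set aborts the run if the variable came back empty. A standalone approximation of that pipeline, assuming the admin credentials are already exported in the environment:

get_field() {
    # print column N of a "|"-delimited table row, stripping the padding whitespace
    local field=$1 data
    while read data; do
        echo "$data" | awk -F '[ \t]*\\|[ \t]*' "{print \$$((field + 1))}"
    done
}

TOKEN=$(keystone token-get | grep ' id ' | get_field 2)
# same effect as die_if_not_set in the trace
[ -n "$TOKEN" ] || { echo "Keystone fail to get token" >&2; exit 1; }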
g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ baremetal ]] + return 1 + echo_summary 'Uploading images' + [[ -t 3 ]] + echo -e Uploading images + [[ -n '' ]] + for image_url in '${IMAGE_URLS//,/ }' + upload_image file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd MIITtgYJKoZIhvcNAQcCoIITpzCCE6MCAQExCTAHBgUrDgMCGjCCEgwGCSqGSIb3DQEHAaCCEf0EghH5eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0xMC0xMVQyMzozNzoxOS44NjgxMDciLCAiZXhwaXJlcyI6ICIyMDE0LTEwLTEyVDAwOjM3OjE5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92Mi83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiNmZmNzljZWZiOTQ5NDBhYzkwNmVhODg3YzdmYzRkNjQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojk2OTYvIiwgImlkIjogIjkzZjg2ZTYxNDdjNzQ3NGVhZjc5ODczYjI3MjAxMmI5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJuZXR3b3JrIiwgIm5hbWUiOiAibmV1dHJvbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzYvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgImlkIjogIjliZjEyYThjZGU2NjQ4ODNiM2RiMTE0MjY3MDM3ZTRjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZXYyIiwgIm5hbWUiOiAiY2luZGVydjIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc0L3YzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyIsICJpZCI6ICI0MTNmMTFhMjNjMjI0OWI4OTMyYjA5NzVhYmYwNjkyOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjb21wdXRldjMiLCAibmFtZSI6ICJub3ZhdjMifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjozMzMzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyIsICJpZCI6ICI5ZjU0YTZjYzU0MDQ0ZDk5YjI3MDk4MGM3OGZhOTNmZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6OTI5MiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIiLCAiaWQiOiAiMTY4Yjg0ZjU5YWUyNGEyYjk4MTg4YjMyMmMwOWJjNGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzcvIiwgImlkIjogIjFjZWIxMzIyYWM0ODRjMGM4OGE2OTcyNGEzN2M0YTg4Iiw
gInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJtZXRlcmluZyIsICJuYW1lIjogImNlaWxvbWV0ZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDAwL3YxIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSIsICJpZCI6ICIxMzU2ZWU4YmI5NDQ0OGRiYjc4NGNmMmIzZjYyNDRkNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1hdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICIxZGM2NjllZDA5Nzk0MWEzYWI0ODU2ZDRlZjNhZjQyNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwMjZkMjNkZDAxNmQ0ZDFjOWFlNmUyMTAxYzE0NGQxNiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDA0L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICI4MWQ5ODYxOTZmMDA0NTZkYWE5NTgyNjlmYmFjZmE2OSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvcmNoZXN0cmF0aW9uIiwgIm5hbWUiOiAiaGVhdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDgwL3YxL0FVVEhfNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiMTE5MGFhZTI0NmY3NDhhMmE4YmY4ZjYyM2ZkMzZkOTkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAvdjEvQVVUSF83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvYmplY3Qtc3RvcmUiLCAibmFtZSI6ICJzd2lmdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo1MDAwL3YyLjAiLCAiaWQiOiAiMTRiYWY3Y2NkZjE5NGU5ZjhjOGMwYjBmNWYxMDViMzQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjUwMDAvdjIuMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJpZGVudGl0eSIsICJuYW1lIjogImtleXN0b25lIn1dLCAidXNlciI6IHsidXNlcm5hbWUiOiAiYWRtaW4iLCAicm9sZXNfbGlua3MiOiBbXSwgImlkIjogIjg2ODI3OWRlMzE5NzRjYjRiMzBkMGUzNGJhY2E1MTVmIiwgInJvbGVzIjogW3sibmFtZSI6ICJhZG1pbiJ9LCB7Im5hbWUiOiAiaGVhdF9zdGFja19vd25lciJ9LCB7Im5hbWUiOiAiX21lbWJlcl8ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjI1OTVhMTdjMGUwMjRiMjk4NmNjNzAxMWI4MTEzNDUzIiwgImU5ZTM4ODY1NzIxMTQ5MWRhZmQ1ODA5NDJkZTdlZTc2IiwgIjlmZTJmZjllZTQzODRiMTg5NGE5MDg3OGQzZTkyYmFiIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEALvqt4wrcutZpP8GfS6NhGmIAyyGH3P2ZkxaO3xzaz6Hxv0WRB99PfeTQw+zlkGfXccisOGG+A3BZuhlgSTCjYxLU+DYknJOLwCHKHORdpS0b+-Y-imJLzgD4JxWQb0aVWPzFjEhReusDtnYqSeP5fk6NY0U40yBWJHXIrVpmDUi6XU3jAuSMaX
2b0W6XsuZpiFFMXQjIvxn0mkL+PclijxIivtzxpgjQX89ajqhXRBWucpL6lUoyxOrDUcqbLF6EBym0q9p8LijHVczv-Uf7B90rHXlNouvx7zLZnqQqQi9GFrIHlbPReTIujZD0S8sB1mFUNzmljrFIbdmAJTBm2g== + local image_url=file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd + local token=MIITtgYJKoZIhvcNAQcCoIITpzCCE6MCAQExCTAHBgUrDgMCGjCCEgwGCSqGSIb3DQEHAaCCEf0EghH5eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0xMC0xMVQyMzozNzoxOS44NjgxMDciLCAiZXhwaXJlcyI6ICIyMDE0LTEwLTEyVDAwOjM3OjE5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92Mi83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiNmZmNzljZWZiOTQ5NDBhYzkwNmVhODg3YzdmYzRkNjQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojk2OTYvIiwgImlkIjogIjkzZjg2ZTYxNDdjNzQ3NGVhZjc5ODczYjI3MjAxMmI5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJuZXR3b3JrIiwgIm5hbWUiOiAibmV1dHJvbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzYvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgImlkIjogIjliZjEyYThjZGU2NjQ4ODNiM2RiMTE0MjY3MDM3ZTRjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZXYyIiwgIm5hbWUiOiAiY2luZGVydjIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc0L3YzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyIsICJpZCI6ICI0MTNmMTFhMjNjMjI0OWI4OTMyYjA5NzVhYmYwNjkyOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjb21wdXRldjMiLCAibmFtZSI6ICJub3ZhdjMifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjozMzMzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyIsICJpZCI6ICI5ZjU0YTZjYzU0MDQ0ZDk5YjI3MDk4MGM3OGZhOTNmZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6OTI5MiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIiLCAiaWQiOiAiMTY4Yjg0ZjU5YWUyNGEyYjk4MTg4YjMyMmMwOWJjNGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzcvIiwgImlkIjogIjFjZWIxMzIyYWM0ODRjMGM4OGE2OTcyNGEzN2M0YTg4IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJtZXRlcmluZyIsICJuYW1lIjogImNlaWxvbWV0ZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDAwL3YxIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEw
LjE0LjAuMjY6ODAwMC92MSIsICJpZCI6ICIxMzU2ZWU4YmI5NDQ0OGRiYjc4NGNmMmIzZjYyNDRkNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1hdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICIxZGM2NjllZDA5Nzk0MWEzYWI0ODU2ZDRlZjNhZjQyNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwMjZkMjNkZDAxNmQ0ZDFjOWFlNmUyMTAxYzE0NGQxNiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDA0L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICI4MWQ5ODYxOTZmMDA0NTZkYWE5NTgyNjlmYmFjZmE2OSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvcmNoZXN0cmF0aW9uIiwgIm5hbWUiOiAiaGVhdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDgwL3YxL0FVVEhfNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiMTE5MGFhZTI0NmY3NDhhMmE4YmY4ZjYyM2ZkMzZkOTkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAvdjEvQVVUSF83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvYmplY3Qtc3RvcmUiLCAibmFtZSI6ICJzd2lmdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo1MDAwL3YyLjAiLCAiaWQiOiAiMTRiYWY3Y2NkZjE5NGU5ZjhjOGMwYjBmNWYxMDViMzQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjUwMDAvdjIuMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJpZGVudGl0eSIsICJuYW1lIjogImtleXN0b25lIn1dLCAidXNlciI6IHsidXNlcm5hbWUiOiAiYWRtaW4iLCAicm9sZXNfbGlua3MiOiBbXSwgImlkIjogIjg2ODI3OWRlMzE5NzRjYjRiMzBkMGUzNGJhY2E1MTVmIiwgInJvbGVzIjogW3sibmFtZSI6ICJhZG1pbiJ9LCB7Im5hbWUiOiAiaGVhdF9zdGFja19vd25lciJ9LCB7Im5hbWUiOiAiX21lbWJlcl8ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjI1OTVhMTdjMGUwMjRiMjk4NmNjNzAxMWI4MTEzNDUzIiwgImU5ZTM4ODY1NzIxMTQ5MWRhZmQ1ODA5NDJkZTdlZTc2IiwgIjlmZTJmZjllZTQzODRiMTg5NGE5MDg3OGQzZTkyYmFiIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEALvqt4wrcutZpP8GfS6NhGmIAyyGH3P2ZkxaO3xzaz6Hxv0WRB99PfeTQw+zlkGfXccisOGG+A3BZuhlgSTCjYxLU+DYknJOLwCHKHORdpS0b+-Y-imJLzgD4JxWQb0aVWPzFjEhReusDtnYqSeP5fk6NY0U40yBWJHXIrVpmDUi6XU3jAuSMaX2b0W6XsuZpiFFMXQjIvxn0mkL+PclijxIivtzxpgjQX89ajqhXRBWucpL6lUoyxOrDUcqbLF6EBym0q9p8LijHVczv-Uf7B90rHXlNouvx7zLZnqQqQi9GFrIHlbPReTIujZD0S8sB1mFUNzmljrFIbdmAJTBm2g== + mkdir -p /home/cloudbase/devstack/files/images ++ basename file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd + 
IMAGE_FNAME=cirros-0.3.3-x86_64.vhd + [[ file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd != file* ]] ++ echo file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd ++ sed 's/^file:\/\///g' + IMAGE=/home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd + [[ ! -f /home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd ]] ++ stat -c %s /home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd + [[ 29369856 == \0 ]] + [[ file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd =~ openvz ]] + [[ file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd =~ \.vmdk ]] + [[ file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd =~ \.vhd\.tgz ]] + [[ file:///home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd =~ \.xen-raw\.tgz ]] + KERNEL= + RAMDISK= + DISK_FORMAT= + CONTAINER_FORMAT= + UNPACK= + case "$IMAGE_FNAME" in + local extension=3.3-x86_64.vhd ++ basename /home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd .3.3-x86_64.vhd + IMAGE_NAME=cirros-0 + DISK_FORMAT=vhd + CONTAINER_FORMAT=bare + '[' vhd == gz ']' + is_arch ppc64 + ARCH_TYPE=ppc64 ++ uname -m + [[ x86_64 == \p\p\c\6\4 ]] + '[' bare = bare ']' + '[' '' = zcat ']' + glance --os-auth-token MIITtgYJKoZIhvcNAQcCoIITpzCCE6MCAQExCTAHBgUrDgMCGjCCEgwGCSqGSIb3DQEHAaCCEf0EghH5eyJhY2Nlc3MiOiB7InRva2VuIjogeyJpc3N1ZWRfYXQiOiAiMjAxNC0xMC0xMVQyMzozNzoxOS44NjgxMDciLCAiZXhwaXJlcyI6ICIyMDE0LTEwLTEyVDAwOjM3OjE5WiIsICJpZCI6ICJwbGFjZWhvbGRlciIsICJ0ZW5hbnQiOiB7ImRlc2NyaXB0aW9uIjogbnVsbCwgImVuYWJsZWQiOiB0cnVlLCAiaWQiOiAiNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAibmFtZSI6ICJhZG1pbiJ9fSwgInNlcnZpY2VDYXRhbG9nIjogW3siZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92Mi83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiNmZmNzljZWZiOTQ5NDBhYzkwNmVhODg3YzdmYzRkNjQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzQvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiY29tcHV0ZSIsICJuYW1lIjogIm5vdmEifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojk2OTYvIiwgImlkIjogIjkzZjg2ZTYxNDdjNzQ3NGVhZjc5ODczYjI3MjAxMmI5IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo5Njk2LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJuZXR3b3JrIiwgIm5hbWUiOiAibmV1dHJvbiJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzYvdjIvNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgImlkIjogIjliZjEyYThjZGU2NjQ4ODNiM2RiMTE0MjY3MDM3ZTRjIiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YyLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIn1dLCAiZW5kcG9pbnRzX2xpbmtzIjogW10sICJ0eXBlIjogInZvbHVtZXYyIiwgIm5hbWUiOiAiY2luZGVydjIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc0L3YzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyIsICJpZCI6ICI0MTNmMTFhMjNjMjI0OWI4OTMyYjA5NzVhYmYwNjkyOSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3NC92MyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjb21wdXRldjMiLCAibmFtZSI6ICJub3ZhdjMifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjozMzMzIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyIsICJpZCI6ICI5ZjU0YTZjYzU0MDQ0ZDk5YjI3MDk4MGM3OGZhOTNmZiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6MzMzMyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdL
CAidHlwZSI6ICJzMyIsICJuYW1lIjogInMzIn0sIHsiZW5kcG9pbnRzIjogW3siYWRtaW5VUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6OTI5MiIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIiLCAiaWQiOiAiMTY4Yjg0ZjU5YWUyNGEyYjk4MTg4YjMyMmMwOWJjNGIiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjkyOTIifV0sICJlbmRwb2ludHNfbGlua3MiOiBbXSwgInR5cGUiOiAiaW1hZ2UiLCAibmFtZSI6ICJnbGFuY2UifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyIsICJyZWdpb24iOiAiUmVnaW9uT25lIiwgImludGVybmFsVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2Ojg3NzcvIiwgImlkIjogIjFjZWIxMzIyYWM0ODRjMGM4OGE2OTcyNGEzN2M0YTg4IiwgInB1YmxpY1VSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc3LyJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJtZXRlcmluZyIsICJuYW1lIjogImNlaWxvbWV0ZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDAwL3YxIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSIsICJpZCI6ICIxMzU2ZWU4YmI5NDQ0OGRiYjc4NGNmMmIzZjYyNDRkNyIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwMC92MSJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJjbG91ZGZvcm1hdGlvbiIsICJuYW1lIjogImhlYXQifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4Nzc2L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICIxZGM2NjllZDA5Nzk0MWEzYWI0ODU2ZDRlZjNhZjQyNCIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3Ni92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJ2b2x1bWUiLCAibmFtZSI6ICJjaW5kZXIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4NzczL3NlcnZpY2VzL0FkbWluIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCIsICJpZCI6ICIwMjZkMjNkZDAxNmQ0ZDFjOWFlNmUyMTAxYzE0NGQxNiIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODc3My9zZXJ2aWNlcy9DbG91ZCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJlYzIiLCAibmFtZSI6ICJlYzIifSwgeyJlbmRwb2ludHMiOiBbeyJhZG1pblVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDA0L3YxLzczMGVjZmUzMjM0MjQwZmU4MjA3OGRmNzgwODQxNGZiIiwgInJlZ2lvbiI6ICJSZWdpb25PbmUiLCAiaW50ZXJuYWxVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiIsICJpZCI6ICI4MWQ5ODYxOTZmMDA0NTZkYWE5NTgyNjlmYmFjZmE2OSIsICJwdWJsaWNVUkwiOiAiaHR0cDovLzEwLjE0LjAuMjY6ODAwNC92MS83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvcmNoZXN0cmF0aW9uIiwgIm5hbWUiOiAiaGVhdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo4MDgwL3YxL0FVVEhfNzMwZWNmZTMyMzQyNDBmZTgyMDc4ZGY3ODA4NDE0ZmIiLCAiaWQiOiAiMTE5MGFhZTI0NmY3NDhhMmE4YmY4ZjYyM2ZkMzZkOTkiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjgwODAvdjEvQVVUSF83MzBlY2ZlMzIzNDI0MGZlODIwNzhkZjc4MDg0MTRmYiJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJvYmplY3Qtc3RvcmUiLCAibmFtZSI6ICJzd2lmdCJ9LCB7ImVuZHBvaW50cyI6IFt7ImFkbWluVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjM1MzU3L3YyLjAiLCAicmVnaW9uIjogIlJlZ2lvbk9uZSIsICJpbnRlcm5hbFVSTCI6ICJodHRwOi8vMTAuMTQuMC4yNjo1MDAwL3YyLjAiLCAiaWQiOiAiMTRiYWY3Y2NkZjE5NGU5ZjhjOGMwYjBmNWYxMDViMzQiLCAicHVibGljVVJMIjogImh0dHA6Ly8xMC4xNC4wLjI2OjUwMDAvdjIuMCJ9XSwgImVuZHBvaW50c19saW5rcyI6IFtdLCAidHlwZSI6ICJpZGVudGl0eSIsICJuYW1lIjogImtleXN0b25lIn1dLCAidXNlciI6IHsidXNlcm5hbWUiOiAiYWRtaW4iLCAicm9sZXNfbGlua3MiOiBbXSwgImlkIjogIjg2ODI3OWRlMzE5NzRjYjRiMzBkMGUzNGJhY2E1MTVmIiwgInJvbGVzIjogW3sibmFtZSI6ICJhZG1pbiJ9LCB7Im5hbWUiOiAiaGVhdF9zdGFja19vd25lciJ9LCB7Im5hbWUi
OiAiX21lbWJlcl8ifV0sICJuYW1lIjogImFkbWluIn0sICJtZXRhZGF0YSI6IHsiaXNfYWRtaW4iOiAwLCAicm9sZXMiOiBbIjI1OTVhMTdjMGUwMjRiMjk4NmNjNzAxMWI4MTEzNDUzIiwgImU5ZTM4ODY1NzIxMTQ5MWRhZmQ1ODA5NDJkZTdlZTc2IiwgIjlmZTJmZjllZTQzODRiMTg5NGE5MDg3OGQzZTkyYmFiIl19fX0xggGBMIIBfQIBATBcMFcxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVVbnNldDEOMAwGA1UEBwwFVW5zZXQxDjAMBgNVBAoMBVVuc2V0MRgwFgYDVQQDDA93d3cuZXhhbXBsZS5jb20CAQEwBwYFKw4DAhowDQYJKoZIhvcNAQEBBQAEggEALvqt4wrcutZpP8GfS6NhGmIAyyGH3P2ZkxaO3xzaz6Hxv0WRB99PfeTQw+zlkGfXccisOGG+A3BZuhlgSTCjYxLU+DYknJOLwCHKHORdpS0b+-Y-imJLzgD4JxWQb0aVWPzFjEhReusDtnYqSeP5fk6NY0U40yBWJHXIrVpmDUi6XU3jAuSMaX2b0W6XsuZpiFFMXQjIvxn0mkL+PclijxIivtzxpgjQX89ajqhXRBWucpL6lUoyxOrDUcqbLF6EBym0q9p8LijHVczv-Uf7B90rHXlNouvx7zLZnqQqQi9GFrIHlbPReTIujZD0S8sB1mFUNzmljrFIbdmAJTBm2g== --os-image-url http://10.14.0.26:9292 image-create --name cirros-0 --is-public True --container-format=bare --disk-format vhd + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled swift3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + FIXED_KEY= ++ seq 1 64 + for i in '$(seq 1 64)' ++ echo 'obase=16; 12' ++ bc + FIXED_KEY+=C + for i in '$(seq 1 64)' ++ echo 'obase=16; 8' ++ bc + FIXED_KEY+=8 + for i in '$(seq 1 64)' ++ echo 'obase=16; 5' ++ bc + FIXED_KEY+=5 + for i in '$(seq 1 64)' ++ echo 'obase=16; 8' ++ bc + FIXED_KEY+=8 + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 7' ++ bc + FIXED_KEY+=7 + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 13' ++ bc + FIXED_KEY+=D + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 4' ++ bc + FIXED_KEY+=4 + for i in '$(seq 1 64)' ++ echo 'obase=16; 8' ++ bc + FIXED_KEY+=8 + for i in '$(seq 1 64)' ++ echo 'obase=16; 11' ++ bc + FIXED_KEY+=B + for i in '$(seq 1 64)' ++ echo 'obase=16; 8' ++ bc + FIXED_KEY+=8 + for i in '$(seq 1 64)' ++ echo 'obase=16; 3' ++ bc + FIXED_KEY+=3 + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 3' ++ bc + FIXED_KEY+=3 + for i in '$(seq 1 64)' ++ echo 'obase=16; 1' ++ bc + FIXED_KEY+=1 + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 13' ++ bc + FIXED_KEY+=D + for i in '$(seq 1 64)' ++ echo 'obase=16; 7' ++ bc + FIXED_KEY+=7 + for i in '$(seq 1 64)' ++ echo 'obase=16; 6' ++ bc + FIXED_KEY+=6 + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 14' ++ bc + FIXED_KEY+=E + for i in '$(seq 1 64)' ++ echo 'obase=16; 7' ++ bc + FIXED_KEY+=7 + for i in '$(seq 1 64)' ++ echo 'obase=16; 11' ++ bc + FIXED_KEY+=B + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 12' ++ bc + FIXED_KEY+=C + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 1' ++ bc + FIXED_KEY+=1 + for i in '$(seq 1 64)' ++ echo 'obase=16; 9' ++ bc + 
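[Editor's note] For the cirros VHD, upload_image reduces to a single glance v1 call: the extension match strips everything after the first dot, which is why the image is registered as "cirros-0" rather than "cirros-0.3.3-x86_64", the disk format is set to vhd and the container format to bare, and the file is handed to image-create (presumably on stdin, which xtrace does not echo). A sketch of the equivalent call, using the path and endpoint shown in the trace and the TOKEN obtained above:

IMAGE=/home/cloudbase/devstack/cirros-0.3.3-x86_64.vhd
IMAGE_FNAME=$(basename "$IMAGE")              # cirros-0.3.3-x86_64.vhd
extension="${IMAGE_FNAME#*.}"                 # 3.3-x86_64.vhd (everything past the first dot)
IMAGE_NAME=$(basename "$IMAGE" ".$extension") # cirros-0
glance --os-auth-token "$TOKEN" --os-image-url http://10.14.0.26:9292 \
    image-create --name "$IMAGE_NAME" --is-public True \
    --container-format=bare --disk-format vhd <"$IMAGE"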
FIXED_KEY+=9 + for i in '$(seq 1 64)' ++ echo 'obase=16; 13' ++ bc + FIXED_KEY+=D + for i in '$(seq 1 64)' ++ echo 'obase=16; 11' ++ bc + FIXED_KEY+=B + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 7' ++ bc + FIXED_KEY+=7 + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 5' ++ bc + FIXED_KEY+=5 + for i in '$(seq 1 64)' ++ echo 'obase=16; 12' ++ bc + FIXED_KEY+=C + for i in '$(seq 1 64)' ++ echo 'obase=16; 3' ++ bc + FIXED_KEY+=3 + for i in '$(seq 1 64)' ++ echo 'obase=16; 10' ++ bc + FIXED_KEY+=A + for i in '$(seq 1 64)' ++ echo 'obase=16; 6' ++ bc + FIXED_KEY+=6 + for i in '$(seq 1 64)' ++ echo 'obase=16; 13' ++ bc + FIXED_KEY+=D + for i in '$(seq 1 64)' ++ echo 'obase=16; 11' ++ bc + FIXED_KEY+=B + for i in '$(seq 1 64)' ++ echo 'obase=16; 9' ++ bc + FIXED_KEY+=9 + for i in '$(seq 1 64)' ++ echo 'obase=16; 11' ++ bc + FIXED_KEY+=B + for i in '$(seq 1 64)' ++ echo 'obase=16; 6' ++ bc + FIXED_KEY+=6 + for i in '$(seq 1 64)' ++ echo 'obase=16; 7' ++ bc + FIXED_KEY+=7 + for i in '$(seq 1 64)' ++ echo 'obase=16; 6' ++ bc + FIXED_KEY+=6 + for i in '$(seq 1 64)' ++ echo 'obase=16; 4' ++ bc + FIXED_KEY+=4 + for i in '$(seq 1 64)' ++ echo 'obase=16; 9' ++ bc + FIXED_KEY+=9 + for i in '$(seq 1 64)' ++ echo 'obase=16; 13' ++ bc + FIXED_KEY+=D + for i in '$(seq 1 64)' ++ echo 'obase=16; 12' ++ bc + FIXED_KEY+=C + for i in '$(seq 1 64)' ++ echo 'obase=16; 15' ++ bc + FIXED_KEY+=F + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 10' ++ bc + FIXED_KEY+=A + for i in '$(seq 1 64)' ++ echo 'obase=16; 10' ++ bc + FIXED_KEY+=A + for i in '$(seq 1 64)' ++ echo 'obase=16; 2' ++ bc + FIXED_KEY+=2 + for i in '$(seq 1 64)' ++ echo 'obase=16; 10' ++ bc + FIXED_KEY+=A + for i in '$(seq 1 64)' ++ echo 'obase=16; 0' ++ bc + FIXED_KEY+=0 + for i in '$(seq 1 64)' ++ echo 'obase=16; 8' ++ bc + FIXED_KEY+=8 + iniset /etc/nova/nova.conf keymgr fixed_key C858F7F0D248B83031FD760E7B0F0C2019DBF7225C3A6DB9B67649DCF2AA2A08 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + is_service_enabled zeromq ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Nova API' + [[ -t 3 ]] + echo -e Starting Nova API + start_nova_api + local service_port=8774 + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-api /usr/local/bin/nova-api + local service=n-api + local command=/usr/local/bin/nova-api + local group= + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-api /usr/local/bin/nova-api '' + local service=n-api + local command=/usr/local/bin/nova-api + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-api /usr/local/bin/nova-api + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
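[Editor's note] The long run of bc calls above is DevStack assembling a 64-hex-digit fixed key for Nova's key manager one nibble at a time: each iteration converts a small decimal value to hex with bc and appends it, and the finished key is written to the [keymgr] section of nova.conf with the iniset helper seen in the trace. A compact equivalent; the random source is not visible in the trace, so $((RANDOM % 16)) is a stand-in:

FIXED_KEY=""
for i in $(seq 1 64); do
    # turn one value in 0..15 into a hex digit, as each bc call above does
    FIXED_KEY+=$(echo "obase=16; $((RANDOM % 16))" | bc)
done
iniset /etc/nova/nova.conf keymgr fixed_key "$FIXED_KEY"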
-e /home/cloudbase/devstack/stack-screenrc ]] + grep n-api /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-api bash' + echo 'stuff "/usr/local/bin/nova-api "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-api.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-api + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-api -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-api.2014-10-12-023444.log + screen -S stack -p n-api -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-api.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-api.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-api -X stuff '/usr/local/bin/nova-api & echo $! >/opt/stack/status/stack/n-api.pid; fg || echo "n-api failed to start" | tee "/opt/stack/status/stack/n-api.failure" ' + echo 'Waiting for nova-api to start...' + wait_for_service 60 http://10.14.0.26:8774 + local timeout=60 + local url=http://10.14.0.26:8774 + timeout 60 sh -c 'while ! curl --noproxy '\''*'\'' -s http://10.14.0.26:8774 >/dev/null; do sleep 1; done' + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Neutron' + [[ -t 3 ]] + echo -e Starting Neutron + start_neutron_service_and_check + local cfg_file + local 'CFG_FILE_OPTIONS=--config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + run_process q-svc 'python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local service=q-svc + local 'command=python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local group= + is_service_enabled q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-svc 'python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' '' + local service=q-svc + local 'command=python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-svc 'python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-svc /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-svc bash' + echo 'stuff "python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-svc.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-svc + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-svc -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-svc.2014-10-12-023444.log + screen -S stack -p q-svc -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-svc.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-svc.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-svc -X stuff 'python /usr/local/bin/neutron-server --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini & echo $! >/opt/stack/status/stack/q-svc.pid; fg || echo "q-svc failed to start" | tee "/opt/stack/status/stack/q-svc.failure" ' + echo 'Waiting for Neutron to start...' + timeout 60 sh -c 'while ! wget --no-proxy -q -O- http://10.14.0.26:9696; do sleep 1; done' + check_neutron_third_party_integration + _neutron_third_party_do check + is_service_enabled neutron ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + start_neutron_agents + run_process q-agt 'python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local service=q-agt + local 'command=python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local group= + is_service_enabled q-agt ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-agt 'python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' '' + local service=q-agt + local 'command=python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-agt ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-agt 'python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
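[Editor's note] Both the nova-api and neutron-server starts above are followed by the same readiness check: a one-second polling loop wrapped in timeout, so stack.sh either proceeds as soon as the endpoint answers or fails fast after 60 seconds (the Glance and Neutron checks use wget --no-proxy, the Nova check uses curl --noproxy; the pattern is identical). The generalized form, matching the curl variant in the trace:

wait_for_service() {
    # poll $2 once per second, giving up with a non-zero exit after $1 seconds
    local timeout=$1 url=$2
    timeout "$timeout" sh -c "while ! curl --noproxy '*' -s $url >/dev/null; do sleep 1; done"
}

wait_for_service 60 http://10.14.0.26:8774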
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-agt /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-agt bash' + echo 'stuff "python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-agt.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-agt + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-agt -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-agt.2014-10-12-023444.log + screen -S stack -p q-agt -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-agt.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-agt.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-agt -X stuff 'python /usr/local/bin/neutron-openvswitch-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/plugins/ml2/ml2_conf.ini & echo $! >/opt/stack/status/stack/q-agt.pid; fg || echo "q-agt failed to start" | tee "/opt/stack/status/stack/q-agt.failure" ' + run_process q-dhcp 'python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini' + local service=q-dhcp + local 'command=python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini' + local group= + is_service_enabled q-dhcp ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-dhcp 'python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini' '' + local service=q-dhcp + local 'command=python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-dhcp ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-dhcp 'python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-dhcp /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-dhcp bash' + echo 'stuff "python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-dhcp.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-dhcp + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-dhcp -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-dhcp.2014-10-12-023444.log + screen -S stack -p q-dhcp -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-dhcp.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-dhcp.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-dhcp -X stuff 'python /usr/local/bin/neutron-dhcp-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/dhcp_agent.ini & echo $! >/opt/stack/status/stack/q-dhcp.pid; fg || echo "q-dhcp failed to start" | tee "/opt/stack/status/stack/q-dhcp.failure" ' + L3_CONF_FILES='--config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini' + is_service_enabled q-fwaas ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + L3_CONF_FILES='--config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' + is_service_enabled q-vpn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + run_process q-vpn '/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' + local service=q-vpn + local 'command=/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' + local group= + is_service_enabled q-vpn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-vpn '/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' '' + local service=q-vpn + local 'command=/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-vpn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-vpn '/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-vpn /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-vpn bash' + echo 'stuff "/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-vpn.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-vpn + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-vpn -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-vpn.2014-10-12-023444.log + screen -S stack -p q-vpn -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-vpn.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-vpn.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-vpn -X stuff '/usr/local/bin/neutron-vpn-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/l3_agent.ini --config-file /etc/neutron/fwaas_driver.ini & echo $! >/opt/stack/status/stack/q-vpn.pid; fg || echo "q-vpn failed to start" | tee "/opt/stack/status/stack/q-vpn.failure" ' + run_process q-meta 'python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini' + local service=q-meta + local 'command=python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini' + local group= + is_service_enabled q-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-meta 'python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini' '' + local service=q-meta + local 'command=python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-meta 'python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-meta /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-meta bash' + echo 'stuff "python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-meta.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-meta + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-meta -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-meta.2014-10-12-023444.log + screen -S stack -p q-meta -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-meta.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-meta.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-meta -X stuff 'python /usr/local/bin/neutron-metadata-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/metadata_agent.ini & echo $! >/opt/stack/status/stack/q-meta.pid; fg || echo "q-meta failed to start" | tee "/opt/stack/status/stack/q-meta.failure" ' + '[' libvirt = xenserver ']' + is_service_enabled q-lbaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + run_process q-lbaas 'python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini' + local service=q-lbaas + local 'command=python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini' + local group= + is_service_enabled q-lbaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-lbaas 'python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini' '' + local service=q-lbaas + local 'command=python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-lbaas ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-lbaas 'python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-lbaas /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-lbaas bash' + echo 'stuff "python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-lbaas.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-lbaas + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-lbaas -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-lbaas.2014-10-12-023444.log + screen -S stack -p q-lbaas -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-lbaas.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-lbaas.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-lbaas -X stuff 'python /usr/local/bin/neutron-lbaas-agent --config-file /etc/neutron/neutron.conf --config-file=/etc/neutron/services/loadbalancer/haproxy/lbaas_agent.ini & echo $! >/opt/stack/status/stack/q-lbaas.pid; fg || echo "q-lbaas failed to start" | tee "/opt/stack/status/stack/q-lbaas.failure" ' + is_service_enabled q-metering ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + run_process q-metering 'python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini' + local service=q-metering + local 'command=python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini' + local group= + is_service_enabled q-metering ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service q-metering 'python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini' '' + local service=q-metering + local 'command=python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled q-metering ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc q-metering 'python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! 
-e /home/cloudbase/devstack/stack-screenrc ]] + grep q-metering /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t q-metering bash' + echo 'stuff "python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-metering.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t q-metering + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p q-metering -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-metering.2014-10-12-023444.log + screen -S stack -p q-metering -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-metering.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-q-metering.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p q-metering -X stuff 'python /usr/local/bin/neutron-metering-agent --config-file /etc/neutron/neutron.conf --config-file /etc/neutron/services/metering/metering_agent.ini & echo $! >/opt/stack/status/stack/q-metering.pid; fg || echo "q-metering failed to start" | tee "/opt/stack/status/stack/q-metering.failure" ' + is_service_enabled q-svc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Creating initial neutron network elements' + [[ -t 3 ]] + echo -e Creating initial neutron network elements + create_neutron_initial_network ++ openstack project list ++ grep ' demo ' ++ get_field 1 ++ read data ++ '[' 1 -lt 0 ']' ++ field='$2' ++ echo '| 70fae7298c364df6bd617e37e0df13e8 | demo |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $2}' ++ read data + TENANT_ID=70fae7298c364df6bd617e37e0df13e8 + die_if_not_set 374 TENANT_ID 'Failure retrieving TENANT_ID for demo' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + is_baremetal + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ baremetal ]] + return 1 ++ neutron net-create --tenant-id 70fae7298c364df6bd617e37e0df13e8 private ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | b6257741-4979-408f-b1fa-55ed1de71904 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + NET_ID=b6257741-4979-408f-b1fa-55ed1de71904 + die_if_not_set 396 NET_ID 'Failure creating NET_ID for physnet1 70fae7298c364df6bd617e37e0df13e8' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace ++ neutron subnet-create --tenant-id 70fae7298c364df6bd617e37e0df13e8 --ip_version 4 --gateway 10.0.0.1 --name private-subnet b6257741-4979-408f-b1fa-55ed1de71904 10.0.0.0/24 ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 513315ef-8b11-43c1-b17e-e99da98828b2 |' ++ awk '-F[ 
\t]*\\|[ \t]*' '{print $3}' ++ read data + SUBNET_ID=513315ef-8b11-43c1-b17e-e99da98828b2 + die_if_not_set 398 SUBNET_ID 'Failure creating SUBNET_ID for 70fae7298c364df6bd617e37e0df13e8' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + [[ True == \T\r\u\e ]] + [[ True == \T\r\u\e ]] ++ neutron router-create --tenant-id 70fae7298c364df6bd617e37e0df13e8 router1 ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 2c5c0d7d-106a-453d-abb9-1add642b4ea8 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + ROUTER_ID=2c5c0d7d-106a-453d-abb9-1add642b4ea8 + die_if_not_set 406 ROUTER_ID 'Failure creating ROUTER_ID for 70fae7298c364df6bd617e37e0df13e8 router1' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + neutron router-interface-add 2c5c0d7d-106a-453d-abb9-1add642b4ea8 513315ef-8b11-43c1-b17e-e99da98828b2 ++ neutron net-create public -- --router:external=True ++ grep ' id ' ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| id | 374a7629-e83d-4ac4-b6b8-083ac2bc2de1 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + EXT_NET_ID=374a7629-e83d-4ac4-b6b8-083ac2bc2de1 + die_if_not_set 415 EXT_NET_ID 'Failure creating EXT_NET_ID for public' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace ++ grep gateway_ip ++ neutron subnet-create --ip_version 4 --gateway 172.24.4.1 --name public-subnet 374a7629-e83d-4ac4-b6b8-083ac2bc2de1 172.24.4.0/24 -- --enable_dhcp=False ++ get_field 2 ++ read data ++ '[' 2 -lt 0 ']' ++ field='$3' ++ echo '| gateway_ip | 172.24.4.1 |' ++ awk '-F[ \t]*\\|[ \t]*' '{print $3}' ++ read data + EXT_GW_IP=172.24.4.1 + die_if_not_set 417 EXT_GW_IP 'Failure creating EXT_GW_IP' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + neutron router-gateway-set 2c5c0d7d-106a-453d-abb9-1add642b4ea8 374a7629-e83d-4ac4-b6b8-083ac2bc2de1 + is_service_enabled q-l3 ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_neutron_ovs_base_plugin + return 0 + [[ True = \T\r\u\e ]] + CIDR_LEN=24 + sudo ip addr add 172.24.4.1/24 dev br-ex + sudo ip link set br-ex up ++ neutron port-list -c fixed_ips -c device_owner ++ grep router_gateway ++ awk -F '"' '{ print $8; }' + ROUTER_GW_IP=172.24.4.2 + die_if_not_set 427 ROUTER_GW_IP 'Failure retrieving ROUTER_GW_IP' + local exitcode=0 ++ set +o ++ grep xtrace + FXTRACE='set -o xtrace' + set +o xtrace + sudo route add -net 10.0.0.0/24 gw 172.24.4.2 + [[ True == \F\a\l\s\e ]] + setup_neutron_debug + [[ False == \T\r\u\e ]] + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Nova' + [[ -t 3 ]] + echo -e Starting Nova + start_nova + start_nova_compute + is_service_enabled n-cell ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + local compute_cell_conf=/etc/nova/nova.conf + [[ libvirt = \l\i\b\v\i\r\t ]] + run_process n-cpu '/usr/local/bin/nova-compute --config-file /etc/nova/nova.conf' libvirtd + local service=n-cpu + local 'command=/usr/local/bin/nova-compute --config-file /etc/nova/nova.conf' + local group=libvirtd + is_service_enabled n-cpu ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + start_nova_rest + local api_cell_conf=/etc/nova/nova.conf + is_service_enabled n-cell ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + local 
compute_cell_conf=/etc/nova/nova.conf + run_process n-cond '/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf' + local service=n-cond + local 'command=/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-cond ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-cond '/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf' '' + local service=n-cond + local 'command=/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-cond ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-cond '/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-cond /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-cond bash' + echo 'stuff "/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cond.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-cond + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-cond -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cond.2014-10-12-023444.log + screen -S stack -p n-cond -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cond.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cond.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-cond -X stuff '/usr/local/bin/nova-conductor --config-file /etc/nova/nova.conf & echo $! 
>/opt/stack/status/stack/n-cond.pid; fg || echo "n-cond failed to start" | tee "/opt/stack/status/stack/n-cond.failure" ' + run_process n-cell-region '/usr/local/bin/nova-cells --config-file /etc/nova/nova.conf' + local service=n-cell-region + local 'command=/usr/local/bin/nova-cells --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-cell-region ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-cell-child '/usr/local/bin/nova-cells --config-file /etc/nova/nova.conf' + local service=n-cell-child + local 'command=/usr/local/bin/nova-cells --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-cell-child ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-crt '/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf' + local service=n-crt + local 'command=/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-crt ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-crt '/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf' '' + local service=n-crt + local 'command=/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ grep xtrace +++ set +o ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-crt ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-crt '/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-crt /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-crt bash' + echo 'stuff "/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-crt.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-crt + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-crt -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-crt.2014-10-12-023444.log + screen -S stack -p n-crt -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-crt.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-crt.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-crt -X stuff '/usr/local/bin/nova-cert --config-file /etc/nova/nova.conf & echo $! 
>/opt/stack/status/stack/n-crt.pid; fg || echo "n-crt failed to start" | tee "/opt/stack/status/stack/n-crt.failure" ' + run_process n-net '/usr/local/bin/nova-network --config-file /etc/nova/nova.conf' + local service=n-net + local 'command=/usr/local/bin/nova-network --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-net ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-sch '/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf' + local service=n-sch + local 'command=/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-sch ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-sch '/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf' '' + local service=n-sch + local 'command=/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-sch ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-sch '/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-sch /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-sch bash' + echo 'stuff "/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-sch.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-sch + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-sch -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-sch.2014-10-12-023444.log + screen -S stack -p n-sch -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-sch.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-sch.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-sch -X stuff '/usr/local/bin/nova-scheduler --config-file /etc/nova/nova.conf & echo $! 
>/opt/stack/status/stack/n-sch.pid; fg || echo "n-sch failed to start" | tee "/opt/stack/status/stack/n-sch.failure" ' + run_process n-api-meta '/usr/local/bin/nova-api-metadata --config-file /etc/nova/nova.conf' + local service=n-api-meta + local 'command=/usr/local/bin/nova-api-metadata --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-api-meta ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-novnc '/usr/local/bin/nova-novncproxy --config-file /etc/nova/nova.conf --web ' + local service=n-novnc + local 'command=/usr/local/bin/nova-novncproxy --config-file /etc/nova/nova.conf --web ' + local group= + is_service_enabled n-novnc ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-xvnc '/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf' + local service=n-xvnc + local 'command=/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-xvnc ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-xvnc '/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf' '' + local service=n-xvnc + local 'command=/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-xvnc ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-xvnc '/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-xvnc /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-xvnc bash' + echo 'stuff "/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-xvnc.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-xvnc + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-xvnc -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-xvnc.2014-10-12-023444.log + screen -S stack -p n-xvnc -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-xvnc.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-xvnc.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-xvnc -X stuff '/usr/local/bin/nova-xvpvncproxy --config-file /etc/nova/nova.conf & echo $! 
>/opt/stack/status/stack/n-xvnc.pid; fg || echo "n-xvnc failed to start" | tee "/opt/stack/status/stack/n-xvnc.failure" ' + run_process n-spice '/usr/local/bin/nova-spicehtml5proxy --config-file /etc/nova/nova.conf --web ' + local service=n-spice + local 'command=/usr/local/bin/nova-spicehtml5proxy --config-file /etc/nova/nova.conf --web ' + local group= + is_service_enabled n-spice ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-cauth '/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf' + local service=n-cauth + local 'command=/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-cauth ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-cauth '/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf' '' + local service=n-cauth + local 'command=/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-cauth ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-cauth '/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-cauth /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-cauth bash' + echo 'stuff "/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cauth.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-cauth + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-cauth -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cauth.2014-10-12-023444.log + screen -S stack -p n-cauth -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cauth.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-cauth.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-cauth -X stuff '/usr/local/bin/nova-consoleauth --config-file /etc/nova/nova.conf & echo $! 
>/opt/stack/status/stack/n-cauth.pid; fg || echo "n-cauth failed to start" | tee "/opt/stack/status/stack/n-cauth.failure" ' + is_service_enabled swift3 ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process n-obj '/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf' + local service=n-obj + local 'command=/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf' + local group= + is_service_enabled n-obj ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service n-obj '/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf' '' + local service=n-obj + local 'command=/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled n-obj ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc n-obj '/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep n-obj /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t n-obj bash' + echo 'stuff "/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-obj.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t n-obj + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p n-obj -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-obj.2014-10-12-023444.log + screen -S stack -p n-obj -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-obj.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-n-obj.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p n-obj -X stuff '/usr/local/bin/nova-objectstore --config-file /etc/nova/nova.conf & echo $! >/opt/stack/status/stack/n-obj.pid; fg || echo "n-obj failed to start" | tee "/opt/stack/status/stack/n-obj.failure" ' + is_service_enabled cinder ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Cinder' + [[ -t 3 ]] + echo -e Starting Cinder + start_cinder + is_service_enabled c-vol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + sudo rm -f /etc/tgt/conf.d/stack.conf + _configure_tgt_for_config_d + [[ ! 
-d /etc/tgt/stack.d/ ]] + is_ubuntu + [[ -z deb ]] + '[' deb = deb ']' + sudo service tgt restart stop: Unknown instance: + sudo tgtadm --mode system --op update --name debug --value on + run_process c-api '/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf' + local service=c-api + local 'command=/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf' + local group= + is_service_enabled c-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service c-api '/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf' '' + local service=c-api + local 'command=/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled c-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc c-api '/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep c-api /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t c-api bash' + echo 'stuff "/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-api.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t c-api + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p c-api -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-api.2014-10-12-023444.log + screen -S stack -p c-api -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-api.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-api.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p c-api -X stuff '/opt/stack/cinder/bin/cinder-api --config-file /etc/cinder/cinder.conf & echo $! >/opt/stack/status/stack/c-api.pid; fg || echo "c-api failed to start" | tee "/opt/stack/status/stack/c-api.failure" ' + echo 'Waiting for Cinder API to start...' + wait_for_service 60 http://10.14.0.26:8776 + local timeout=60 + local url=http://10.14.0.26:8776 + timeout 60 sh -c 'while ! 
curl --noproxy '\''*'\'' -s http://10.14.0.26:8776 >/dev/null; do sleep 1; done' + run_process c-sch '/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf' + local service=c-sch + local 'command=/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf' + local group= + is_service_enabled c-sch ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service c-sch '/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf' '' + local service=c-sch + local 'command=/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled c-sch ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc c-sch '/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep c-sch /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t c-sch bash' + echo 'stuff "/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-sch.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t c-sch + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p c-sch -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-sch.2014-10-12-023444.log + screen -S stack -p c-sch -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-sch.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-sch.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p c-sch -X stuff '/opt/stack/cinder/bin/cinder-scheduler --config-file /etc/cinder/cinder.conf & echo $! 
>/opt/stack/status/stack/c-sch.pid; fg || echo "c-sch failed to start" | tee "/opt/stack/status/stack/c-sch.failure" ' + run_process c-bak '/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf' + local service=c-bak + local 'command=/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf' + local group= + is_service_enabled c-bak ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service c-bak '/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf' '' + local service=c-bak + local 'command=/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled c-bak ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc c-bak '/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep c-bak /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t c-bak bash' + echo 'stuff "/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-bak.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t c-bak + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p c-bak -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-bak.2014-10-12-023444.log + screen -S stack -p c-bak -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-bak.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-bak.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p c-bak -X stuff '/opt/stack/cinder/bin/cinder-backup --config-file /etc/cinder/cinder.conf & echo $! 
>/opt/stack/status/stack/c-bak.pid; fg || echo "c-bak failed to start" | tee "/opt/stack/status/stack/c-bak.failure" ' + run_process c-vol '/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf' + local service=c-vol + local 'command=/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf' + local group= + is_service_enabled c-vol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service c-vol '/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf' '' + local service=c-vol + local 'command=/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled c-vol ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc c-vol '/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep c-vol /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t c-vol bash' + echo 'stuff "/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-vol.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t c-vol + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p c-vol -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-vol.2014-10-12-023444.log + screen -S stack -p c-vol -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-vol.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-c-vol.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p c-vol -X stuff '/opt/stack/cinder/bin/cinder-volume --config-file /etc/cinder/cinder.conf & echo $! 
>/opt/stack/status/stack/c-vol.pid; fg || echo "c-vol failed to start" | tee "/opt/stack/status/stack/c-vol.failure" ' + is_service_enabled c-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled tls-proxy ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled ceilometer ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Starting Ceilometer' + [[ -t 3 ]] + echo -e Starting Ceilometer + init_ceilometer + sudo mkdir -p /var/cache/ceilometer + sudo chown cloudbase /var/cache/ceilometer + rm -f '/var/cache/ceilometer/*' + is_service_enabled mysql postgresql ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + '[' mongodb = mysql ']' + '[' mongodb = postgresql ']' + start_ceilometer + run_process ceilometer-acentral 'ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-acentral + local 'command=ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-acentral ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service ceilometer-acentral 'ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf' '' + local service=ceilometer-acentral + local 'command=ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled ceilometer-acentral ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc ceilometer-acentral 'ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep ceilometer-acentral /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t ceilometer-acentral bash' + echo 'stuff "ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-acentral.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t ceilometer-acentral + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p ceilometer-acentral -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-acentral.2014-10-12-023444.log + screen -S stack -p ceilometer-acentral -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-acentral.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-acentral.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p ceilometer-acentral -X stuff 'ceilometer-agent-central --config-file /etc/ceilometer/ceilometer.conf & echo $! 
>/opt/stack/status/stack/ceilometer-acentral.pid; fg || echo "ceilometer-acentral failed to start" | tee "/opt/stack/status/stack/ceilometer-acentral.failure" ' + run_process ceilometer-anotification 'ceilometer-agent-notification --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-anotification + local 'command=ceilometer-agent-notification --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-anotification ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process ceilometer-collector 'ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-collector + local 'command=ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-collector ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service ceilometer-collector 'ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf' '' + local service=ceilometer-collector + local 'command=ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled ceilometer-collector ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc ceilometer-collector 'ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep ceilometer-collector /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t ceilometer-collector bash' + echo 'stuff "ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-collector.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t ceilometer-collector + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p ceilometer-collector -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-collector.2014-10-12-023444.log + screen -S stack -p ceilometer-collector -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-collector.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-collector.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p ceilometer-collector -X stuff 'ceilometer-collector --config-file /etc/ceilometer/ceilometer.conf & echo $! 
>/opt/stack/status/stack/ceilometer-collector.pid; fg || echo "ceilometer-collector failed to start" | tee "/opt/stack/status/stack/ceilometer-collector.failure" ' + run_process ceilometer-api 'ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-api + local 'command=ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service ceilometer-api 'ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf' '' + local service=ceilometer-api + local 'command=ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled ceilometer-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc ceilometer-api 'ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep ceilometer-api /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t ceilometer-api bash' + echo 'stuff "ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-api.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t ceilometer-api + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p ceilometer-api -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-api.2014-10-12-023444.log + screen -S stack -p ceilometer-api -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-api.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-ceilometer-api.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p ceilometer-api -X stuff 'ceilometer-api -d -v --log-dir=/var/log/ceilometer-api --config-file /etc/ceilometer/ceilometer.conf & echo $! 
>/opt/stack/status/stack/ceilometer-api.pid; fg || echo "ceilometer-api failed to start" | tee "/opt/stack/status/stack/ceilometer-api.failure" ' + [[ libvirt = \l\i\b\v\i\r\t ]] + run_process ceilometer-acompute 'ceilometer-agent-compute --config-file /etc/ceilometer/ceilometer.conf' libvirtd + local service=ceilometer-acompute + local 'command=ceilometer-agent-compute --config-file /etc/ceilometer/ceilometer.conf' + local group=libvirtd + is_service_enabled ceilometer-acompute ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + [[ libvirt = \v\s\p\h\e\r\e ]] + is_service_enabled ceilometer-api ++ grep xtrace ++ set +o + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo 'Waiting for ceilometer-api to start...' + timeout 60 sh -c 'while ! curl --noproxy '\''*'\'' -s http://localhost:8777/v2/ >/dev/null; do sleep 1; done' + run_process ceilometer-alarm-notifier 'ceilometer-alarm-notifier --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-alarm-notifier + local 'command=ceilometer-alarm-notifier --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-alarm-notifier ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + run_process ceilometer-alarm-evaluator 'ceilometer-alarm-evaluator --config-file /etc/ceilometer/ceilometer.conf' + local service=ceilometer-alarm-evaluator + local 'command=ceilometer-alarm-evaluator --config-file /etc/ceilometer/ceilometer.conf' + local group= + is_service_enabled ceilometer-alarm-evaluator ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 1 + is_service_enabled heat ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + echo_summary 'Configuring Heat' + [[ -t 3 ]] + echo -e Configuring Heat + init_heat + recreate_database heat utf8 + local db=heat + local charset=utf8 + recreate_database_mysql heat utf8 + local db=heat + local charset=utf8 + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'DROP DATABASE IF EXISTS heat;' + mysql -uroot -pPassw0rd -h127.0.0.1 -e 'CREATE DATABASE heat CHARACTER SET utf8;' + /opt/stack/heat/bin/heat-manage db_sync No handlers could be found for logger "heat.common.config" 2014-10-12 02:38:37.371 21937 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:38:37.371 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.371 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.371 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.371 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.372 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.373 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.374 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.repository [-] Repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:38:37.375 21937 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'heat'), ('version_table', 'migrate_version'), ('required_dbs', '[]'), ('use_timestamp_numbering', 'False')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:38:37.380 21937 WARNING heat.openstack.common.db.sqlalchemy.session [-] This application has not enabled MySQL traditional mode, which means silent data corruption may occur. Please encourage the application developers to enable this mode. 2014-10-12 02:38:37.405 21937 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.406 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.407 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.408 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.409 21937 DEBUG migrate.versioning.repository [-] Repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:38:37.410 21937 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'heat'), ('version_table', 'migrate_version'), ('required_dbs', '[]'), ('use_timestamp_numbering', 'False')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:38:37.472 21937 DEBUG migrate.versioning.repository [-] Loading repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:76 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/015_grizzly.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/016_timeout_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/017_event_state_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/018_resource_id_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.473 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/019_resource_action_status.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/020_stack_action.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/021_resource_data.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/022_stack_event_soft_delete.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/023_raw_template_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.474 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/024_event_resource_name.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/025_user_creds_drop_service.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/026_user_creds_drop_aws.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/027_user_creds_trusts.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.475 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/028_text_mysql_longtext.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/029_event_id_to_uuid.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/030_remove_uuidutils.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/031_stack_lock.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.476 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/032_decrypt_method.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/033_software_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/034_raw_template_files.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/035_event_uuid_to_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/036_stack_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/037_migrate_hot_template.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/038_software_config_json_config.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py... 
__init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.477 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/039_user_creds_nullable.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/040_software_deployment_no_signal_id.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/041_migrate_hot_template_resources.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/042_software_deployment_domain_project.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Loading script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py... __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:27 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.script.base [-] Script /opt/stack/heat/heat/db/sqlalchemy/migrate_repo/versions/043_migrate_template_versions.py loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/script/base.py:30 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.repository [-] Repository /opt/stack/heat/heat/db/sqlalchemy/migrate_repo loaded successfully __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:82 2014-10-12 02:38:37.478 21937 DEBUG migrate.versioning.repository [-] Config: OrderedDict([('db_settings', OrderedDict([('__name__', 'db_settings'), ('repository_id', 'heat'), ('version_table', 'migrate_version'), ('required_dbs', '[]'), ('use_timestamp_numbering', 'False')]))]) __init__ /usr/local/lib/python2.7/dist-packages/migrate/versioning/repository.py:83 2014-10-12 02:38:37.483 21937 INFO migrate.versioning.api [-] 14 -> 15... 2014-10-12 02:38:37.694 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:37.694 21937 INFO migrate.versioning.api [-] 15 -> 16... 2014-10-12 02:38:37.769 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:37.769 21937 INFO migrate.versioning.api [-] 16 -> 17... 
2014-10-12 02:38:37.899 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:37.899 21937 INFO migrate.versioning.api [-] 17 -> 18... 2014-10-12 02:38:37.981 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:37.981 21937 INFO migrate.versioning.api [-] 18 -> 19... 2014-10-12 02:38:38.168 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.168 21937 INFO migrate.versioning.api [-] 19 -> 20... 2014-10-12 02:38:38.250 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.250 21937 INFO migrate.versioning.api [-] 20 -> 21... 2014-10-12 02:38:38.297 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.297 21937 INFO migrate.versioning.api [-] 21 -> 22... 2014-10-12 02:38:38.366 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.366 21937 INFO migrate.versioning.api [-] 22 -> 23... 2014-10-12 02:38:38.448 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.449 21937 INFO migrate.versioning.api [-] 23 -> 24... 2014-10-12 02:38:38.545 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.546 21937 INFO migrate.versioning.api [-] 24 -> 25... 2014-10-12 02:38:38.662 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.663 21937 INFO migrate.versioning.api [-] 25 -> 26... 2014-10-12 02:38:38.778 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.778 21937 INFO migrate.versioning.api [-] 26 -> 27... 2014-10-12 02:38:38.909 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:38.909 21937 INFO migrate.versioning.api [-] 27 -> 28... 2014-10-12 02:38:39.133 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.133 21937 INFO migrate.versioning.api [-] 28 -> 29... 2014-10-12 02:38:39.199 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.199 21937 INFO migrate.versioning.api [-] 29 -> 30... 2014-10-12 02:38:39.267 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.267 21937 INFO migrate.versioning.api [-] 30 -> 31... 2014-10-12 02:38:39.307 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.308 21937 INFO migrate.versioning.api [-] 31 -> 32... 2014-10-12 02:38:39.455 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.455 21937 INFO migrate.versioning.api [-] 32 -> 33... /usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/default.py:436: Warning: Specified key was too long; max key length is 767 bytes cursor.execute(statement, parameters) 2014-10-12 02:38:39.695 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.695 21937 INFO migrate.versioning.api [-] 33 -> 34... 2014-10-12 02:38:39.761 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:39.762 21937 INFO migrate.versioning.api [-] 34 -> 35... 2014-10-12 02:38:40.137 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.137 21937 INFO migrate.versioning.api [-] 35 -> 36... 2014-10-12 02:38:40.195 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.195 21937 INFO migrate.versioning.api [-] 36 -> 37... 2014-10-12 02:38:40.268 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.268 21937 INFO migrate.versioning.api [-] 37 -> 38... 2014-10-12 02:38:40.345 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.345 21937 INFO migrate.versioning.api [-] 38 -> 39... 2014-10-12 02:38:40.420 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.420 21937 INFO migrate.versioning.api [-] 39 -> 40... 
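Editor's note: the heat-manage db_sync run emits two warnings worth noting. The oslo db session layer warns that MySQL "traditional" SQL mode is not enabled, so silent data corruption may occur, and SQLAlchemy surfaces MySQL's "Specified key was too long; max key length is 767 bytes" warning while applying migration 32 -> 33 (this limit is typically hit when indexing long VARCHAR columns under utf8). Neither warning aborts the sync; the version counter keeps advancing, and the last script loaded above is 043_migrate_template_versions. A minimal sketch for inspecting the server's SQL mode and the schema version afterwards, reusing the credentials and the migrate_version table name shown in the trace (the queries are illustrative and are not part of stack.sh):

# Check whether a strict/traditional sql_mode is set on the server.
mysql -uroot -pPassw0rd -h127.0.0.1 -e "SELECT @@sql_mode;"
# Inspect the sqlalchemy-migrate bookkeeping table in the heat database.
mysql -uroot -pPassw0rd -h127.0.0.1 heat -e "SELECT * FROM migrate_version;"
# Re-running the sync is a no-op once the recorded version matches the
# repository head.
/opt/stack/heat/bin/heat-manage db_sync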
2014-10-12 02:38:40.512 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.512 21937 INFO migrate.versioning.api [-] 40 -> 41... 2014-10-12 02:38:40.532 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.532 21937 INFO migrate.versioning.api [-] 41 -> 42... 2014-10-12 02:38:40.610 21937 INFO migrate.versioning.api [-] done 2014-10-12 02:38:40.610 21937 INFO migrate.versioning.api [-] 42 -> 43... 2014-10-12 02:38:40.634 21937 INFO migrate.versioning.api [-] done + create_heat_cache_dir + sudo mkdir -p /var/cache/heat + sudo chown cloudbase /var/cache/heat + echo_summary 'Starting Heat' + [[ -t 3 ]] + echo -e Starting Heat + start_heat + run_process h-eng '/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf' + local service=h-eng + local 'command=/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf' + local group= + is_service_enabled h-eng ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service h-eng '/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf' '' + local service=h-eng + local 'command=/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled h-eng ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc h-eng '/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep h-eng /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t h-eng bash' + echo 'stuff "/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-eng.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t h-eng + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p h-eng -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-eng.2014-10-12-023444.log + screen -S stack -p h-eng -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-eng.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-eng.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p h-eng -X stuff '/opt/stack/heat/bin/heat-engine --config-file=/etc/heat/heat.conf & echo $! 
>/opt/stack/status/stack/h-eng.pid; fg || echo "h-eng failed to start" | tee "/opt/stack/status/stack/h-eng.failure" ' + run_process h-api '/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf' + local service=h-api + local 'command=/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf' + local group= + is_service_enabled h-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service h-api '/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf' '' + local service=h-api + local 'command=/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled h-api ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc h-api '/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep h-api /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t h-api bash' + echo 'stuff "/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t h-api + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p h-api -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api.2014-10-12-023444.log + screen -S stack -p h-api -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p h-api -X stuff '/opt/stack/heat/bin/heat-api --config-file=/etc/heat/heat.conf & echo $! 
>/opt/stack/status/stack/h-api.pid; fg || echo "h-api failed to start" | tee "/opt/stack/status/stack/h-api.failure" ' + run_process h-api-cfn '/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf' + local service=h-api-cfn + local 'command=/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf' + local group= + is_service_enabled h-api-cfn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service h-api-cfn '/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf' '' + local service=h-api-cfn + local 'command=/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled h-api-cfn ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc h-api-cfn '/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep h-api-cfn /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t h-api-cfn bash' + echo 'stuff "/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cfn.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t h-api-cfn + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p h-api-cfn -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cfn.2014-10-12-023444.log + screen -S stack -p h-api-cfn -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cfn.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cfn.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p h-api-cfn -X stuff '/opt/stack/heat/bin/heat-api-cfn --config-file=/etc/heat/heat.conf & echo $! 
>/opt/stack/status/stack/h-api-cfn.pid; fg || echo "h-api-cfn failed to start" | tee "/opt/stack/status/stack/h-api-cfn.failure" ' + run_process h-api-cw '/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf' + local service=h-api-cw + local 'command=/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf' + local group= + is_service_enabled h-api-cw ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + [[ True = \T\r\u\e ]] + screen_service h-api-cw '/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf' '' + local service=h-api-cw + local 'command=/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf' + local group= + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status ++ trueorfalse True True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace + USE_SCREEN=True + is_service_enabled h-api-cw ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + screen_rc h-api-cw '/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf' + SCREEN_NAME=stack + SCREENRC=/home/cloudbase/devstack/stack-screenrc + [[ ! -e /home/cloudbase/devstack/stack-screenrc ]] + grep h-api-cw /home/cloudbase/devstack/stack-screenrc ++ echo -ne '\015' + NL=$'\r' + echo 'screen -t h-api-cw bash' + echo 'stuff "/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf "' + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + echo 'logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cw.2014-10-12-023444.log' + echo 'log on' + screen -S stack -X screen -t h-api-cw + [[ -n /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack ]] + screen -S stack -p h-api-cw -X logfile /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cw.2014-10-12-023444.log + screen -S stack -p h-api-cw -X log on + ln -sf /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cw.2014-10-12-023444.log /home/cloudbase/devstack-hyperv-incubator/reports/2014_10_11_20_55_03_982702456/HyperV2008r2/logs/devstack/screen-h-api-cw.log + sleep 3 ++ echo -ne '\015' + NL=$'\r' + [[ -n '' ]] + screen -S stack -p h-api-cw -X stuff '/opt/stack/heat/bin/heat-api-cloudwatch --config-file=/etc/heat/heat.conf & echo $! 
>/opt/stack/status/stack/h-api-cw.pid; fg || echo "h-api-cw failed to start" | tee "/opt/stack/status/stack/h-api-cw.failure" ' + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_service_enabled key ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + USERRC_PARAMS='-PA --target-dir /home/cloudbase/devstack/accrc' + '[' -f /opt/stack/data/ca-bundle.pem ']' + /home/cloudbase/devstack/tools/create_userrc.sh -PA --target-dir /home/cloudbase/devstack/accrc + ACCOUNT_DIR=./accrc ++ getopt -o hPAp:u:r:C: -l os-username:,os-password:,os-tenant-name:,os-tenant-id:,os-auth-url:,target-dir:,skip-tenant:,os-cacert:,help,debug -- -PA --target-dir /home/cloudbase/devstack/accrc + options=' -P -A --target-dir '\''/home/cloudbase/devstack/accrc'\'' --' + eval set -- -P -A --target-dir ''\''/home/cloudbase/devstack/accrc'\''' -- ++ set -- -P -A --target-dir /home/cloudbase/devstack/accrc -- + ADDPASS= + SKIP_TENANT=,service, + MODE= + ROLE=Member + USER_NAME= + USER_PASS= + '[' 5 -gt 0 ']' + case "$1" in + ADDPASS=yes + shift + '[' 4 -gt 0 ']' + case "$1" in + MODE=all + shift + '[' 3 -gt 0 ']' + case "$1" in + ACCOUNT_DIR=/home/cloudbase/devstack/accrc + shift + shift + '[' 1 -gt 0 ']' + case "$1" in + shift + break + '[' -z Passw0rd ']' + '[' -z admin -a -z '' ']' + '[' -z admin ']' + '[' -z http://10.14.0.26:35357/v2.0 ']' + USER_PASS=Passw0rd + USER_NAME=admin + '[' -z all ']' + export -n SERVICE_TOKEN SERVICE_ENDPOINT OS_SERVICE_TOKEN OS_SERVICE_ENDPOINT + EC2_URL=http://localhost:8773/service/Cloud + S3_URL=http://localhost:3333 ++ keystone endpoint-get --service ec2 ++ awk '/\|[[:space:]]*ec2.publicURL/ {print $4}' + ec2=http://10.14.0.26:8773/services/Cloud + '[' -n http://10.14.0.26:8773/services/Cloud ']' + EC2_URL=http://10.14.0.26:8773/services/Cloud ++ awk '/\|[[:space:]]*s3.publicURL/ {print $4}' ++ keystone endpoint-get --service s3 + s3=http://10.14.0.26:3333 + '[' -n http://10.14.0.26:3333 ']' + S3_URL=http://10.14.0.26:3333 + mkdir -p /home/cloudbase/devstack/accrc ++ readlink -f /home/cloudbase/devstack/accrc + ACCOUNT_DIR=/home/cloudbase/devstack/accrc + EUCALYPTUS_CERT=/home/cloudbase/devstack/accrc/cacert.pem + '[' -e /home/cloudbase/devstack/accrc/cacert.pem ']' + mv /home/cloudbase/devstack/accrc/cacert.pem /home/cloudbase/devstack/accrc/cacert.pem.old + nova x509-get-root-cert /home/cloudbase/devstack/accrc/cacert.pem + '[' all '!=' create ']' ++ keystone tenant-list ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo 730ecfe3234240fe82078df7808414fb@admin ++ sed 's/@/ /' + echo ,service, + grep -q ,admin, ++ keystone user-list --tenant-id 730ecfe3234240fe82078df7808414fb ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 868279de31974cb4b30d0e34baca515f@admin ++ sed 's/@/ /' + '[' all = one -a admin '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 868279de31974cb4b30d0e34baca515f admin 730ecfe3234240fe82078df7808414fb admin Passw0rd + local user_id=868279de31974cb4b30d0e34baca515f + local user_name=admin + local 
tenant_id=730ecfe3234240fe82078df7808414fb + local tenant_name=admin + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 868279de31974cb4b30d0e34baca515f ++ grep -E '^\|[[:space:]]*(admin|730ecfe3234240fe82078df7808414fb)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 868279de31974cb4b30d0e34baca515f --tenant-id 730ecfe3234240fe82078df7808414fb +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | c79f049315bc458d8c69ded0bf88c608 | | secret | 17fce28196d745ab8db35cb7ca3733ca | | tenant_id | 730ecfe3234240fe82078df7808414fb | | trust_id | | | user_id | 868279de31974cb4b30d0e34baca515f | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 868279de31974cb4b30d0e34baca515f ++ grep -E '^\|[[:space:]]*(admin|730ecfe3234240fe82078df7808414fb)[[:space:]]*\|' ++ head -n 1 + line='| admin | c79f049315bc458d8c69ded0bf88c608 | 17fce28196d745ab8db35cb7ca3733ca |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' admin '|' c79f049315bc458d8c69ded0bf88c608 '|' 17fce28196d745ab8db35cb7ca3733ca '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/admin + local rcfile=/home/cloudbase/devstack/accrc/admin/admin + local ec2_cert=/home/cloudbase/devstack/accrc/admin/admin-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/admin/admin-pk.pem + '[' -e /home/cloudbase/devstack/accrc/admin/admin-pk.pem ']' + mv -f /home/cloudbase/devstack/accrc/admin/admin-pk.pem /home/cloudbase/devstack/accrc/admin/admin-pk.pem.old + '[' -e /home/cloudbase/devstack/accrc/admin/admin-cert.pem ']' + mv -f /home/cloudbase/devstack/accrc/admin/admin-cert.pem /home/cloudbase/devstack/accrc/admin/admin-cert.pem.old + nova --os-password Passw0rd --os-username admin --os-tenant-name admin x509-create-cert /home/cloudbase/devstack/accrc/admin/admin-pk.pem /home/cloudbase/devstack/accrc/admin/admin-cert.pem + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo 74c64a24e7784056b031459343442fa7@alt_demo ++ sed 's/@/ /' + echo ,service, + grep -q ,alt_demo, ++ keystone user-list --tenant-id 74c64a24e7784056b031459343442fa7 ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 3c36de3203e545d1b842b908b9a465d0@alt_demo ++ sed 's/@/ /' + '[' all = one -a alt_demo '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 3c36de3203e545d1b842b908b9a465d0 alt_demo 74c64a24e7784056b031459343442fa7 alt_demo Passw0rd + local user_id=3c36de3203e545d1b842b908b9a465d0 + local user_name=alt_demo + local tenant_id=74c64a24e7784056b031459343442fa7 + local tenant_name=alt_demo + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 3c36de3203e545d1b842b908b9a465d0 ++ grep -E '^\|[[:space:]]*(alt_demo|74c64a24e7784056b031459343442fa7)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 3c36de3203e545d1b842b908b9a465d0 --tenant-id 74c64a24e7784056b031459343442fa7 
+-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | cde4f23423f248808351ec93a6b3fc2f | | secret | 23594f1af4ed4a4493559d7006fa049e | | tenant_id | 74c64a24e7784056b031459343442fa7 | | trust_id | | | user_id | 3c36de3203e545d1b842b908b9a465d0 | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 3c36de3203e545d1b842b908b9a465d0 ++ grep -E '^\|[[:space:]]*(alt_demo|74c64a24e7784056b031459343442fa7)[[:space:]]*\|' ++ head -n 1 + line='| alt_demo | cde4f23423f248808351ec93a6b3fc2f | 23594f1af4ed4a4493559d7006fa049e |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' alt_demo '|' cde4f23423f248808351ec93a6b3fc2f '|' 23594f1af4ed4a4493559d7006fa049e '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/alt_demo + local rcfile=/home/cloudbase/devstack/accrc/alt_demo/alt_demo + local ec2_cert=/home/cloudbase/devstack/accrc/alt_demo/alt_demo-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/alt_demo/alt_demo-pk.pem + '[' -e /home/cloudbase/devstack/accrc/alt_demo/alt_demo-pk.pem ']' + mv -f /home/cloudbase/devstack/accrc/alt_demo/alt_demo-pk.pem /home/cloudbase/devstack/accrc/alt_demo/alt_demo-pk.pem.old + '[' -e /home/cloudbase/devstack/accrc/alt_demo/alt_demo-cert.pem ']' + mv -f /home/cloudbase/devstack/accrc/alt_demo/alt_demo-cert.pem /home/cloudbase/devstack/accrc/alt_demo/alt_demo-cert.pem.old + nova --os-password Passw0rd --os-username alt_demo --os-tenant-name alt_demo x509-create-cert /home/cloudbase/devstack/accrc/alt_demo/alt_demo-pk.pem /home/cloudbase/devstack/accrc/alt_demo/alt_demo-cert.pem + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo 70fae7298c364df6bd617e37e0df13e8@demo ++ sed 's/@/ /' + echo ,service, + grep -q ,demo, ++ keystone user-list --tenant-id 70fae7298c364df6bd617e37e0df13e8 ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 868279de31974cb4b30d0e34baca515f@admin ++ sed 's/@/ /' + '[' all = one -a admin '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 868279de31974cb4b30d0e34baca515f admin 70fae7298c364df6bd617e37e0df13e8 demo Passw0rd + local user_id=868279de31974cb4b30d0e34baca515f + local user_name=admin + local tenant_id=70fae7298c364df6bd617e37e0df13e8 + local tenant_name=demo + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 868279de31974cb4b30d0e34baca515f ++ grep -E '^\|[[:space:]]*(demo|70fae7298c364df6bd617e37e0df13e8)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 868279de31974cb4b30d0e34baca515f --tenant-id 70fae7298c364df6bd617e37e0df13e8 +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | 8fca7c9582514cf69bd50825c7a7263e | | secret | bdcde1c17a424b17b4d71196dc0e7041 | | tenant_id | 70fae7298c364df6bd617e37e0df13e8 | | trust_id | | | user_id | 868279de31974cb4b30d0e34baca515f | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 
868279de31974cb4b30d0e34baca515f ++ grep -E '^\|[[:space:]]*(demo|70fae7298c364df6bd617e37e0df13e8)[[:space:]]*\|' ++ head -n 1 + line='| demo | 8fca7c9582514cf69bd50825c7a7263e | bdcde1c17a424b17b4d71196dc0e7041 |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' demo '|' 8fca7c9582514cf69bd50825c7a7263e '|' bdcde1c17a424b17b4d71196dc0e7041 '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/demo + local rcfile=/home/cloudbase/devstack/accrc/demo/admin + local ec2_cert=/home/cloudbase/devstack/accrc/demo/admin-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/demo/admin-pk.pem + '[' -e /home/cloudbase/devstack/accrc/demo/admin-pk.pem ']' + mv -f /home/cloudbase/devstack/accrc/demo/admin-pk.pem /home/cloudbase/devstack/accrc/demo/admin-pk.pem.old + '[' -e /home/cloudbase/devstack/accrc/demo/admin-cert.pem ']' + mv -f /home/cloudbase/devstack/accrc/demo/admin-cert.pem /home/cloudbase/devstack/accrc/demo/admin-cert.pem.old + nova --os-password Passw0rd --os-username admin --os-tenant-name demo x509-create-cert /home/cloudbase/devstack/accrc/demo/admin-pk.pem /home/cloudbase/devstack/accrc/demo/admin-cert.pem + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 8c7e8affb74940b19ec2c7e76832797d@demo ++ sed 's/@/ /' + '[' all = one -a demo '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 8c7e8affb74940b19ec2c7e76832797d demo 70fae7298c364df6bd617e37e0df13e8 demo Passw0rd + local user_id=8c7e8affb74940b19ec2c7e76832797d + local user_name=demo + local tenant_id=70fae7298c364df6bd617e37e0df13e8 + local tenant_name=demo + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 8c7e8affb74940b19ec2c7e76832797d ++ grep -E '^\|[[:space:]]*(demo|70fae7298c364df6bd617e37e0df13e8)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 8c7e8affb74940b19ec2c7e76832797d --tenant-id 70fae7298c364df6bd617e37e0df13e8 +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | 0866e8fa87b34f62a716c70f55ccb1db | | secret | 380ba3603c2943319880c4f19874f680 | | tenant_id | 70fae7298c364df6bd617e37e0df13e8 | | trust_id | | | user_id | 8c7e8affb74940b19ec2c7e76832797d | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 8c7e8affb74940b19ec2c7e76832797d ++ grep -E '^\|[[:space:]]*(demo|70fae7298c364df6bd617e37e0df13e8)[[:space:]]*\|' ++ head -n 1 + line='| demo | 0866e8fa87b34f62a716c70f55ccb1db | 380ba3603c2943319880c4f19874f680 |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' demo '|' 0866e8fa87b34f62a716c70f55ccb1db '|' 380ba3603c2943319880c4f19874f680 '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/demo + local rcfile=/home/cloudbase/devstack/accrc/demo/demo + local ec2_cert=/home/cloudbase/devstack/accrc/demo/demo-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/demo/demo-pk.pem + '[' -e /home/cloudbase/devstack/accrc/demo/demo-pk.pem ']' + mv -f /home/cloudbase/devstack/accrc/demo/demo-pk.pem /home/cloudbase/devstack/accrc/demo/demo-pk.pem.old + '[' -e /home/cloudbase/devstack/accrc/demo/demo-cert.pem ']' + mv -f 
/home/cloudbase/devstack/accrc/demo/demo-cert.pem /home/cloudbase/devstack/accrc/demo/demo-cert.pem.old + nova --os-password Passw0rd --os-username demo --os-tenant-name demo x509-create-cert /home/cloudbase/devstack/accrc/demo/demo-pk.pem /home/cloudbase/devstack/accrc/demo/demo-cert.pem + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo 3f496858f9b9412f8f3d1fe52c4640a2@invisible_to_admin ++ sed 's/@/ /' + echo ,service, + grep -q ,invisible_to_admin, ++ keystone user-list --tenant-id 3f496858f9b9412f8f3d1fe52c4640a2 ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ sed 's/@/ /' ++ echo 8c7e8affb74940b19ec2c7e76832797d@demo + '[' all = one -a demo '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 8c7e8affb74940b19ec2c7e76832797d demo 3f496858f9b9412f8f3d1fe52c4640a2 invisible_to_admin Passw0rd + local user_id=8c7e8affb74940b19ec2c7e76832797d + local user_name=demo + local tenant_id=3f496858f9b9412f8f3d1fe52c4640a2 + local tenant_name=invisible_to_admin + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 8c7e8affb74940b19ec2c7e76832797d ++ grep -E '^\|[[:space:]]*(invisible_to_admin|3f496858f9b9412f8f3d1fe52c4640a2)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 8c7e8affb74940b19ec2c7e76832797d --tenant-id 3f496858f9b9412f8f3d1fe52c4640a2 +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | 186ee006522c444592af6e8a40a407b6 | | secret | 817f9e21666445e5bff890e50fdd9753 | | tenant_id | 3f496858f9b9412f8f3d1fe52c4640a2 | | trust_id | | | user_id | 8c7e8affb74940b19ec2c7e76832797d | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 8c7e8affb74940b19ec2c7e76832797d ++ grep -E '^\|[[:space:]]*(invisible_to_admin|3f496858f9b9412f8f3d1fe52c4640a2)[[:space:]]*\|' ++ head -n 1 + line='| invisible_to_admin | 186ee006522c444592af6e8a40a407b6 | 817f9e21666445e5bff890e50fdd9753 |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' invisible_to_admin '|' 186ee006522c444592af6e8a40a407b6 '|' 817f9e21666445e5bff890e50fdd9753 '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/invisible_to_admin + local rcfile=/home/cloudbase/devstack/accrc/invisible_to_admin/demo + local ec2_cert=/home/cloudbase/devstack/accrc/invisible_to_admin/demo-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/invisible_to_admin/demo-pk.pem + '[' -e /home/cloudbase/devstack/accrc/invisible_to_admin/demo-pk.pem ']' + mv -f /home/cloudbase/devstack/accrc/invisible_to_admin/demo-pk.pem /home/cloudbase/devstack/accrc/invisible_to_admin/demo-pk.pem.old + '[' -e /home/cloudbase/devstack/accrc/invisible_to_admin/demo-cert.pem ']' + mv -f /home/cloudbase/devstack/accrc/invisible_to_admin/demo-cert.pem /home/cloudbase/devstack/accrc/invisible_to_admin/demo-cert.pem.old + nova --os-password Passw0rd --os-username demo --os-tenant-name invisible_to_admin x509-create-cert /home/cloudbase/devstack/accrc/invisible_to_admin/demo-pk.pem 
/home/cloudbase/devstack/accrc/invisible_to_admin/demo-cert.pem + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo a0ab893ec5894bde968f3d6d06537d17@service ++ sed 's/@/ /' + echo ,service, + grep -q ,service, + continue + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo c5afdf06f190483abdeced505563e007@swifttenanttest1 ++ sed 's/@/ /' + echo ,service, + grep -q ,swifttenanttest1, ++ keystone user-list --tenant-id c5afdf06f190483abdeced505563e007 ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 45f3baab2fea409abe817ee7d404ab64@swiftusertest1 ++ sed 's/@/ /' + '[' all = one -a swiftusertest1 '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 45f3baab2fea409abe817ee7d404ab64 swiftusertest1 c5afdf06f190483abdeced505563e007 swifttenanttest1 Passw0rd + local user_id=45f3baab2fea409abe817ee7d404ab64 + local user_name=swiftusertest1 + local tenant_id=c5afdf06f190483abdeced505563e007 + local tenant_name=swifttenanttest1 + local user_passwd=Passw0rd ++ grep -E '^\|[[:space:]]*(swifttenanttest1|c5afdf06f190483abdeced505563e007)[[:space:]]*\|' ++ keystone ec2-credentials-list --user_id 45f3baab2fea409abe817ee7d404ab64 ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 45f3baab2fea409abe817ee7d404ab64 --tenant-id c5afdf06f190483abdeced505563e007 +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | bf8202d67149461bbbcab2389ce600b3 | | secret | 3b22fc6032054893b56e206015f0dd0d | | tenant_id | c5afdf06f190483abdeced505563e007 | | trust_id | | | user_id | 45f3baab2fea409abe817ee7d404ab64 | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 45f3baab2fea409abe817ee7d404ab64 ++ grep -E '^\|[[:space:]]*(swifttenanttest1|c5afdf06f190483abdeced505563e007)[[:space:]]*\|' ++ head -n 1 + line='| swifttenanttest1 | bf8202d67149461bbbcab2389ce600b3 | 3b22fc6032054893b56e206015f0dd0d |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' swifttenanttest1 '|' bf8202d67149461bbbcab2389ce600b3 '|' 3b22fc6032054893b56e206015f0dd0d '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/swifttenanttest1 + local rcfile=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1 + local ec2_cert=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-pk.pem + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-pk.pem ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-cert.pem ']' + nova --os-password Passw0rd --os-username swiftusertest1 --os-tenant-name swifttenanttest1 x509-create-cert /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-pk.pem /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-cert.pem ERROR (CommandError): Invalid OpenStack Nova credentials. 
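Note on the "ERROR (CommandError): Invalid OpenStack Nova credentials." just above (the same error recurs below for swiftusertest3 and swiftusertest2): create_userrc.sh was invoked with -PA and no per-user passwords, so every add_entry call passes the admin OS_PASSWORD ("Passw0rd", visible in the trace). That works for the admin, demo and alt_demo users, but the Swift test accounts are typically created by the Swift setup with their own passwords, which would explain why the nova x509-create-cert call is rejected here. The failure is non-fatal; the script continues and still writes the user's rc file. A minimal manual retry sketch, not part of the log, assuming SWIFT_TEST_PASSWORD holds swiftusertest1's real password and that OS_AUTH_URL is set in the environment just as it was for the script run:

    # Hypothetical retry of the failed cert generation above; SWIFT_TEST_PASSWORD is an assumed name.
    ACC=/home/cloudbase/devstack/accrc/swifttenanttest1
    nova --os-password "$SWIFT_TEST_PASSWORD" --os-username swiftusertest1 \
         --os-tenant-name swifttenanttest1 \
         x509-create-cert "$ACC/swiftusertest1-pk.pem" "$ACC/swiftusertest1-cert.pem"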
+ '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-pk.pem.old ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest1-cert.pem.old ']' + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 75aa805b3ed549fb8411bc06f70f103f@swiftusertest3 ++ sed 's/@/ /' + '[' all = one -a swiftusertest3 '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 75aa805b3ed549fb8411bc06f70f103f swiftusertest3 c5afdf06f190483abdeced505563e007 swifttenanttest1 Passw0rd + local user_id=75aa805b3ed549fb8411bc06f70f103f + local user_name=swiftusertest3 + local tenant_id=c5afdf06f190483abdeced505563e007 + local tenant_name=swifttenanttest1 + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 75aa805b3ed549fb8411bc06f70f103f ++ grep -E '^\|[[:space:]]*(swifttenanttest1|c5afdf06f190483abdeced505563e007)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 75aa805b3ed549fb8411bc06f70f103f --tenant-id c5afdf06f190483abdeced505563e007 +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | 05123f1f7bc844acbbc7696f71e7a6e3 | | secret | 4c846ab0beb843ebb786d0cf0fd0c434 | | tenant_id | c5afdf06f190483abdeced505563e007 | | trust_id | | | user_id | 75aa805b3ed549fb8411bc06f70f103f | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 75aa805b3ed549fb8411bc06f70f103f ++ grep -E '^\|[[:space:]]*(swifttenanttest1|c5afdf06f190483abdeced505563e007)[[:space:]]*\|' ++ head -n 1 + line='| swifttenanttest1 | 05123f1f7bc844acbbc7696f71e7a6e3 | 4c846ab0beb843ebb786d0cf0fd0c434 |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' swifttenanttest1 '|' 05123f1f7bc844acbbc7696f71e7a6e3 '|' 4c846ab0beb843ebb786d0cf0fd0c434 '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/swifttenanttest1 + local rcfile=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3 + local ec2_cert=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-pk.pem + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-pk.pem ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-cert.pem ']' + nova --os-password Passw0rd --os-username swiftusertest3 --os-tenant-name swifttenanttest1 x509-create-cert /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-pk.pem /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-cert.pem ERROR (CommandError): Invalid OpenStack Nova credentials. 
+ '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-pk.pem.old ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest1/swiftusertest3-cert.pem.old ']' + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + for tenant_id_at_name in '`keystone tenant-list | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|$/ {print $2 "@" $4}'\''`' + read tenant_id tenant_name ++ echo 5d1a99fbf32049fe8009477341e9f85c@swifttenanttest2 ++ sed 's/@/ /' + echo ,service, + grep -q ,swifttenanttest2, ++ keystone user-list --tenant-id 5d1a99fbf32049fe8009477341e9f85c ++ awk 'BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}' + for user_id_at_name in '`keystone user-list --tenant-id $tenant_id | awk '\''BEGIN {IGNORECASE = 1} /true[[:space:]]*\|[^|]*\|$/ {print $2 "@" $4}'\''`' + read user_id user_name ++ echo 2a82c4afaba74026a2aac56f46697206@swiftusertest2 ++ sed 's/@/ /' + '[' all = one -a swiftusertest2 '!=' admin ']' + eval 'SPECIFIC_UPASSWORD=$ADMIN_PASSWORD' ++ SPECIFIC_UPASSWORD= + '[' -n '' ']' + add_entry 2a82c4afaba74026a2aac56f46697206 swiftusertest2 5d1a99fbf32049fe8009477341e9f85c swifttenanttest2 Passw0rd + local user_id=2a82c4afaba74026a2aac56f46697206 + local user_name=swiftusertest2 + local tenant_id=5d1a99fbf32049fe8009477341e9f85c + local tenant_name=swifttenanttest2 + local user_passwd=Passw0rd ++ keystone ec2-credentials-list --user_id 2a82c4afaba74026a2aac56f46697206 ++ grep -E '^\|[[:space:]]*(swifttenanttest2|5d1a99fbf32049fe8009477341e9f85c)[[:space:]]*\|' ++ head -n 1 + local line= + '[' -z '' ']' + keystone ec2-credentials-create --user-id 2a82c4afaba74026a2aac56f46697206 --tenant-id 5d1a99fbf32049fe8009477341e9f85c +-----------+----------------------------------+ | Property | Value | +-----------+----------------------------------+ | access | 76efc61438d34efba89c0ca83165b88b | | secret | 325d838cce6044d8974021cfed76272f | | tenant_id | 5d1a99fbf32049fe8009477341e9f85c | | trust_id | | | user_id | 2a82c4afaba74026a2aac56f46697206 | +-----------+----------------------------------+ ++ keystone ec2-credentials-list --user_id 2a82c4afaba74026a2aac56f46697206 ++ grep -E '^\|[[:space:]]*(swifttenanttest2|5d1a99fbf32049fe8009477341e9f85c)[[:space:]]*\|' ++ head -n 1 + line='| swifttenanttest2 | 76efc61438d34efba89c0ca83165b88b | 325d838cce6044d8974021cfed76272f |' + local ec2_access_key ec2_secret_key + read ec2_access_key ec2_secret_key ++ echo '|' swifttenanttest2 '|' 76efc61438d34efba89c0ca83165b88b '|' 325d838cce6044d8974021cfed76272f '|' ++ awk '{print $4 " " $6 }' + mkdir -p /home/cloudbase/devstack/accrc/swifttenanttest2 + local rcfile=/home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2 + local ec2_cert=/home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-cert.pem + local ec2_private_key=/home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-pk.pem + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-pk.pem ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-cert.pem ']' + nova --os-password Passw0rd --os-username swiftusertest2 --os-tenant-name swifttenanttest2 x509-create-cert /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-pk.pem /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-cert.pem ERROR (CommandError): Invalid OpenStack Nova credentials. 
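Apart from those three certificate failures, every enabled tenant/user pair processed in this section still ends up with EC2 access/secret keys (from keystone ec2-credentials-create) and an rc file under /home/cloudbase/devstack/accrc/<tenant>/<user>; the "cat" steps write that file, but xtrace does not show the heredoc body. A short usage sketch, assuming the file exports the EC2_*/OS_* variables create_userrc.sh normally emits (not confirmed by this trace):

    # Illustration only; the exact variable names come from the heredoc that is not shown above.
    source /home/cloudbase/devstack/accrc/demo/demo
    env | grep -E '^(EC2_|S3_|OS_)'        # see what the rc file exported
    keystone ec2-credentials-list --user_id 8c7e8affb74940b19ec2c7e76832797d   # demo's EC2 key pairs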
+ '[' -e /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-pk.pem.old ']' + '[' -e /home/cloudbase/devstack/accrc/swifttenanttest2/swiftusertest2-cert.pem.old ']' + cat + '[' -n yes ']' + echo 'export OS_PASSWORD="Passw0rd"' + is_service_enabled nova ++ set +o ++ grep xtrace + local 'xtrace=set -o xtrace' + set +o xtrace + return 0 + is_baremetal + [[ g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api =~ baremetal ]] + return 1 ++ date +%F-%H%M%S + CURRENT_RUN_TIME=2014-10-12-023909 + echo '# 2014-10-12-023909' + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo BASE_SQL_CONN=mysql://root:Passw0rd@127.0.0.1 + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo HOST_IP=10.14.0.26 + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo LOGFILE= + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo SERVICE_HOST=10.14.0.26 + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo SERVICE_PROTOCOL=http + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo STACK_USER=cloudbase + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo TLS_IP= + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo KEYSTONE_AUTH_PROTOCOL=http + for i in BASE_SQL_CONN ENABLED_SERVICES HOST_IP LOGFILE SERVICE_HOST SERVICE_PROTOCOL STACK_USER TLS_IP KEYSTONE_AUTH_PROTOCOL OS_CACERT + echo OS_CACERT= + merge_config_group /home/cloudbase/devstack/local.conf extra + local localfile=/home/cloudbase/devstack/local.conf + shift + local matchgroups=extra + [[ -r /home/cloudbase/devstack/local.conf ]] + for group in '$matchgroups' ++ get_meta_section_files /home/cloudbase/devstack/local.conf extra ++ local file=/home/cloudbase/devstack/local.conf ++ local matchgroup=extra ++ [[ -r /home/cloudbase/devstack/local.conf ]] ++ awk -v matchgroup=extra ' /^\[\[.+\|.*\]\]/ { gsub("[][]", "", $1); split($1, a, "|"); if (a[1] == matchgroup) print a[2] } ' /home/cloudbase/devstack/local.conf + [[ -d /home/cloudbase/devstack/extras.d ]] + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/50-ironic.sh ]] + source /home/cloudbase/devstack/extras.d/50-ironic.sh stack 
extra ++ is_service_enabled ir-api ir-cond +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-gantt.sh ]] + source /home/cloudbase/devstack/extras.d/70-gantt.sh stack extra ++ is_service_enabled n-sch +++ grep xtrace +++ set +o ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ disable_service gantt ++ local tmpsvcs=,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, ++ local service ++ for service in '$@' ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 +++ _cleanup_service_list ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ echo ,g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api, +++ sed -e ' s/,,/,/g; s/^,//; s/,$// ' ++ ENABLED_SERVICES=g-api,g-reg,key,n-api,n-crt,n-obj,n-cond,n-sch,n-xvnc,n-cauth,c-sch,c-api,c-vol,h-eng,h-api,h-api-cfn,h-api-cw,rabbit,tempest,mysql,neutron,q-svc,q-agt,q-dhcp,q-l3,q-meta,q-lbaas,q-fwaas,q-metering,q-vpn,c-bak,s-proxy,s-object,s-container,s-account,heat,ceilometer-acentral,ceilometer-collector,ceilometer-api ++ is_service_enabled gantt +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-marconi.sh ]] + source /home/cloudbase/devstack/extras.d/70-marconi.sh stack extra ++ is_service_enabled marconi-server +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-sahara.sh ]] + source /home/cloudbase/devstack/extras.d/70-sahara.sh stack extra ++ is_service_enabled sahara +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/70-trove.sh ]] + source /home/cloudbase/devstack/extras.d/70-trove.sh stack extra ++ is_service_enabled trove +++ grep xtrace +++ set +o ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-opendaylight.sh ]] + source /home/cloudbase/devstack/extras.d/80-opendaylight.sh stack extra ++ is_service_enabled odl-server odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-server +++ grep xtrace +++ set +o ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ is_service_enabled odl-compute +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 + for i in '$TOP_DIR/extras.d/*.sh' + [[ -r /home/cloudbase/devstack/extras.d/80-tempest.sh ]] + 
source /home/cloudbase/devstack/extras.d/80-tempest.sh stack extra ++ is_service_enabled tempest +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ [[ stack == \s\o\u\r\c\e ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ extra == \i\n\s\t\a\l\l ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ extra == \p\o\s\t\-\c\o\n\f\i\g ]] ++ [[ stack == \s\t\a\c\k ]] ++ [[ extra == \e\x\t\r\a ]] ++ echo_summary 'Initializing Tempest' ++ [[ -t 3 ]] ++ echo -e Initializing Tempest ++ configure_tempest ++ setup_develop /opt/stack/tempest ++ local project_dir=/opt/stack/tempest ++ setup_package_with_req_sync /opt/stack/tempest -e ++ local project_dir=/opt/stack/tempest ++ local flags=-e +++ cd /opt/stack/tempest +++ git diff --exit-code +++ echo changed ++ local update_requirements=changed ++ [[ changed != \c\h\a\n\g\e\d ]] ++ setup_package /opt/stack/tempest -e ++ local project_dir=/opt/stack/tempest ++ local flags=-e ++ pip_install -e /opt/stack/tempest +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ sudo PIP_DOWNLOAD_CACHE=/var/cache/pip HTTP_PROXY= HTTPS_PROXY= NO_PROXY= /usr/local/bin/pip install --build=/tmp/pip-build.mIwnl -e /opt/stack/tempest ++ sudo rm -rf /tmp/pip-build.mIwnl ++ [[ -e == \-\e ]] ++ safe_chown -R cloudbase /opt/stack/tempest/tempest.egg-info ++ _safe_permission_operation chown -R cloudbase /opt/stack/tempest/tempest.egg-info +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ sudo chown -R cloudbase /opt/stack/tempest/tempest.egg-info ++ '[' True = True ']' ++ [[ changed != \c\h\a\n\g\e\d ]] ++ local image_lines ++ local images ++ local num_images ++ local image_uuid ++ local image_uuid_alt ++ local errexit ++ local password ++ local line ++ local flavors ++ local available_flavors ++ local flavors_ref ++ local flavor_lines ++ local public_network_id ++ local public_router_id ++ local tenant_networks_reachable ++ local boto_instance_type=m1.tiny ++ local ssh_connect_method=fixed +++ set +o +++ grep errexit ++ errexit='set +o errexit' ++ set -o errexit ++ ifs=' ' ++ declare -a images ++ read -r IMAGE_NAME IMAGE_UUID +++ glance image-list --status=active +++ awk '-F|' '!/^(+--)|ID|aki|ari/ { print $3,$2 }' ++ '[' cirros-0 = cirros-0.3.1-x86_64-uec ']' ++ images+=($IMAGE_UUID) ++ read -r IMAGE_NAME IMAGE_UUID ++ case "${#images[*]}" in ++ '[' -z '' ']' ++ image_uuid=a336c645-146b-4d17-bdcf-8509607e178c ++ image_uuid_alt=a336c645-146b-4d17-bdcf-8509607e178c ++ [[ ! 
-d /opt/stack/tempest/etc ]] ++ sudo chown cloudbase /opt/stack/tempest/etc ++ cp /opt/stack/tempest/etc/tempest.conf.sample /opt/stack/tempest/etc/tempest.conf ++ chmod 644 /opt/stack/tempest/etc/tempest.conf ++ password=Passw0rd ++ ADMIN_USERNAME=admin ++ ADMIN_TENANT_NAME=admin ++ ADMIN_DOMAIN_NAME=Default ++ TEMPEST_USERNAME=demo ++ TEMPEST_TENANT_NAME=demo ++ ALT_USERNAME=alt_demo ++ ALT_TENANT_NAME=alt_demo ++ [[ -z '' ]] +++ nova flavor-list ++ available_flavors='+----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | ID | Name | Memory_MB | Disk | Ephemeral | Swap_MB | VCPUs | RXTX_Factor | Is_Public | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | 1 | m1.tiny | 512 | 1 | 0 | | 1 | 1.0 | True | | 2 | m1.small | 2048 | 20 | 0 | | 1 | 1.0 | True | | 3 | m1.medium | 4096 | 40 | 0 | | 2 | 1.0 | True | | 4 | m1.large | 8192 | 80 | 0 | | 4 | 1.0 | True | | 5 | m1.xlarge | 16384 | 160 | 0 | | 8 | 1.0 | True | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+' ++ [[ +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | ID | Name | Memory_MB | Disk | Ephemeral | Swap_MB | VCPUs | RXTX_Factor | Is_Public | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | 1 | m1.tiny | 512 | 1 | 0 | | 1 | 1.0 | True | | 2 | m1.small | 2048 | 20 | 0 | | 1 | 1.0 | True | | 3 | m1.medium | 4096 | 40 | 0 | | 2 | 1.0 | True | | 4 | m1.large | 8192 | 80 | 0 | | 4 | 1.0 | True | | 5 | m1.xlarge | 16384 | 160 | 0 | | 8 | 1.0 | True | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ =~ m1\.nano ]] ++ is_arch ppc64 ++ ARCH_TYPE=ppc64 +++ uname -m ++ [[ x86_64 == \p\p\c\6\4 ]] ++ nova flavor-create m1.nano 42 64 0 1 ++ flavor_ref=42 ++ boto_instance_type=m1.nano ++ [[ +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | ID | Name | Memory_MB | Disk | Ephemeral | Swap_MB | VCPUs | RXTX_Factor | Is_Public | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | 1 | m1.tiny | 512 | 1 | 0 | | 1 | 1.0 | True | | 2 | m1.small | 2048 | 20 | 0 | | 1 | 1.0 | True | | 3 | m1.medium | 4096 | 40 | 0 | | 2 | 1.0 | True | | 4 | m1.large | 8192 | 80 | 0 | | 4 | 1.0 | True | | 5 | m1.xlarge | 16384 | 160 | 0 | | 8 | 1.0 | True | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ =~ m1\.micro ]] ++ is_arch ppc64 ++ ARCH_TYPE=ppc64 +++ uname -m ++ [[ x86_64 == \p\p\c\6\4 ]] ++ nova flavor-create m1.micro 84 128 0 1 ++ flavor_ref_alt=84 ++ '[' True '!=' False ']' ++ tenant_networks_reachable=false ++ is_service_enabled n-net +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ ssh_connect_method=floating ++ ssh_connect_method=floating ++ '[' True = True ']' +++ neutron net-list +++ grep public +++ awk '{print $2}' ++ public_network_id=374a7629-e83d-4ac4-b6b8-083ac2bc2de1 ++ '[' True == False ']' ++ iniset /opt/stack/tempest/etc/tempest.conf DEFAULT lock_path /opt/stack/data/tempest +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ mkdir -p /opt/stack/data/tempest ++ iniset /opt/stack/tempest/etc/tempest.conf DEFAULT use_stderr False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf DEFAULT log_file tempest.log +++ 
set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf DEFAULT debug True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute build_timeout 196 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf volume build_timeout 196 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto build_timeout 196 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute build_interval 1 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf volume build_interval 1 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto build_interval 1 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto http_socket_timeout 5 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity uri http://10.14.0.26:5000/v2.0/ +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity uri_v3 http://10.14.0.26:5000/v3/ +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity username demo +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity password Passw0rd +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity tenant_name demo +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity alt_username alt_demo +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity alt_password Passw0rd +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity alt_tenant_name alt_demo +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity admin_username admin +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity admin_password Passw0rd +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity admin_tenant_name admin +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity admin_domain_name Default +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf identity auth_version v2 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ [[ ! 
-z '' ]] ++ iniset /opt/stack/tempest/etc/tempest.conf compute allow_tenant_isolation True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute ssh_user cirros +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute network_for_ssh private +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute ip_version_for_ssh 4 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute ssh_timeout 196 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute image_ref a336c645-146b-4d17-bdcf-8509607e178c +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute image_ssh_user cirros +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute image_ref_alt a336c645-146b-4d17-bdcf-8509607e178c +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute image_alt_ssh_user cirros +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute flavor_ref 42 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute flavor_ref_alt 84 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute ssh_connect_method floating +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-feature-enabled resize True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-feature-enabled live_migration False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-feature-enabled change_password False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-feature-enabled block_migration_for_live_migration False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-admin username admin +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-admin password Passw0rd +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf compute-admin tenant_name admin +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network api_version 2.0 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network tenant_networks_reachable false +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network public_network_id 374a7629-e83d-4ac4-b6b8-083ac2bc2de1 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset 
/opt/stack/tempest/etc/tempest.conf network public_router_id '' +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network default_network 10.0.0.0/24 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network-feature-enabled ipv6 True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto ec2_url http://10.14.0.26:8773/services/Cloud +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto s3_url http://10.14.0.26:3333 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto s3_materials_path /home/cloudbase/devstack/files/images/s3-materials/cirros-0.3.1 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto ari_manifest cirros-0.3.1-x86_64-initrd.manifest.xml +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto ami_manifest cirros-0.3.1-x86_64-blank.img.manifest.xml +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto aki_manifest cirros-0.3.1-x86_64-vmlinuz.manifest.xml +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto instance_type m1.nano +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto http_socket_timeout 30 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf boto ssh_user cirros +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ is_service_enabled heat +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ [[ ! 
-z '' ]] +++ nova flavor-list ++ available_flavors='+----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | ID | Name | Memory_MB | Disk | Ephemeral | Swap_MB | VCPUs | RXTX_Factor | Is_Public | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | 1 | m1.tiny | 512 | 1 | 0 | | 1 | 1.0 | True | | 2 | m1.small | 2048 | 20 | 0 | | 1 | 1.0 | True | | 3 | m1.medium | 4096 | 40 | 0 | | 2 | 1.0 | True | | 4 | m1.large | 8192 | 80 | 0 | | 4 | 1.0 | True | | 42 | m1.nano | 64 | 0 | 0 | | 1 | 1.0 | True | | 5 | m1.xlarge | 16384 | 160 | 0 | | 8 | 1.0 | True | | 84 | m1.micro | 128 | 0 | 0 | | 1 | 1.0 | True | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+' ++ [[ +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | ID | Name | Memory_MB | Disk | Ephemeral | Swap_MB | VCPUs | RXTX_Factor | Is_Public | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ | 1 | m1.tiny | 512 | 1 | 0 | | 1 | 1.0 | True | | 2 | m1.small | 2048 | 20 | 0 | | 1 | 1.0 | True | | 3 | m1.medium | 4096 | 40 | 0 | | 2 | 1.0 | True | | 4 | m1.large | 8192 | 80 | 0 | | 4 | 1.0 | True | | 42 | m1.nano | 64 | 0 | 0 | | 1 | 1.0 | True | | 5 | m1.xlarge | 16384 | 160 | 0 | | 8 | 1.0 | True | | 84 | m1.micro | 128 | 0 | 0 | | 1 | 1.0 | True | +----+-----------+-----------+------+-----------+---------+-------+-------------+-----------+ =~ m1\.heat ]] ++ nova flavor-create m1.heat 451 512 0 1 ++ iniset /opt/stack/tempest/etc/tempest.conf orchestration instance_type m1.heat +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf orchestration build_timeout 900 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf scenario img_dir /home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf scenario large_ops_number 0 +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ is_service_enabled c-bak +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 +++ trueorfalse False False ++++ set +o ++++ grep xtrace +++ local 'xtrace=set -o xtrace' +++ set +o xtrace ++ CINDER_MULTI_LVM_BACKEND=False ++ '[' False == True ']' ++ '[' default '!=' default ']' ++ iniset /opt/stack/tempest/etc/tempest.conf dashboard dashboard_url http://10.14.0.26/ +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf dashboard login_url http://10.14.0.26/auth/login/ +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf cli cli_dir /usr/local/bin +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ iniset /opt/stack/tempest/etc/tempest.conf network-feature-enabled api_extensions all +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ '[' libvirt = ironic ']' ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled horizon +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 1 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available horizon False +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace 
++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled glance +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available glance True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled nova +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available nova True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled cinder +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available cinder True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled swift +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available swift True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled ceilometer +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available ceilometer True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled heat +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available heat True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ for service in '${TEMPEST_SERVICES//,/ }' ++ is_service_enabled neutron +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ return 0 ++ iniset /opt/stack/tempest/etc/tempest.conf service_available neutron True +++ set +o +++ grep xtrace ++ local 'xtrace=set -o xtrace' ++ set +o xtrace ++ IFS=' ' ++ set +o errexit ++ init_tempest ++ local base_image_name=cirros-0.3.1-x86_64 ++ local image_dir=/home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec ++ local kernel=/home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-vmlinuz ++ local ramdisk=/home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-initrd ++ local disk_image=/home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-blank.img ++ '[' -f /home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-vmlinuz -a -f /home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-initrd -a -f /home/cloudbase/devstack/files/images/cirros-0.3.1-x86_64-uec/cirros-0.3.1-x86_64-blank.img -a libvirt '!=' openvz -a '(' kvm '!=' lxc -o libvirt '!=' libvirt ')' ']' ++ echo 'Prepare aki/ari/ami Images' ++ cat ++ [[ stack == \u\n\s\t\a\c\k ]] ++ [[ stack == \c\l\e\a\n ]] + merge_config_group /home/cloudbase/devstack/local.conf post-extra + local localfile=/home/cloudbase/devstack/local.conf + shift + local matchgroups=post-extra + [[ -r /home/cloudbase/devstack/local.conf ]] + for group in '$matchgroups' ++ get_meta_section_files /home/cloudbase/devstack/local.conf post-extra ++ local file=/home/cloudbase/devstack/local.conf ++ local 
matchgroup=post-extra ++ [[ -r /home/cloudbase/devstack/local.conf ]] ++ awk -v matchgroup=post-extra ' /^\[\[.+\|.*\]\]/ { gsub("[][]", "", $1); split($1, a, "|"); if (a[1] == matchgroup) print a[2] } ' /home/cloudbase/devstack/local.conf + [[ -x /home/cloudbase/devstack/local.sh ]] + echo 'Running user script /home/cloudbase/devstack/local.sh' + /home/cloudbase/devstack/local.sh + service_check + local service + local failures + SCREEN_NAME=stack + SERVICE_DIR=/opt/stack/status + [[ ! -d /opt/stack/status/stack ]] ++ ls '/opt/stack/status/stack/*.failure' ++ /bin/true + failures= + '[' -n '' ']' + set +o xtrace
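The run finishes cleanly: the [[extra]] and [[post-extra]] meta-sections of local.conf yield no files to merge, the user's /home/cloudbase/devstack/local.sh is executed, and service_check finds no "*.failure" markers under /opt/stack/status/stack (the unexpanded glob in the ls trace just means nothing matched, and the error is swallowed by /bin/true), so none of the screen-launched services reported a startup failure. A small post-run inspection sketch, assuming the screen session name "stack" and the paths shown in the trace:

    # Not part of the log: poking at the services started via run_process/screen_service above.
    ls /opt/stack/status/stack/                    # .pid file per started service; .failure appears only on error
    cat /opt/stack/status/stack/h-api.pid          # PID recorded by the command stuffed into the h-api window
    screen -ls                                     # confirm the "stack" screen session is still up
    screen -S stack -p h-api -X hardcopy /tmp/h-api.txt   # dump the h-api window contents to a file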