review.fuel-infra Code Review - tools/sustaining.git/commitdiff
Add Python validation scripts 81/12681/3
author    vrovachev <vrovachev@mirantis.com>
Fri, 9 Oct 2015 11:34:01 +0000 (14:34 +0300)
committer vrovachev <vrovachev@mirantis.com>
Wed, 14 Oct 2015 10:31:00 +0000 (13:31 +0300)
Added Python script for bug 1393376
Added Python script for bug 1486682
Added Bash script for bug 1487450

Change-Id: Ia0272d3beb6c55cb5bc10592b13d33564db0454e

verification_scripts/6.1-mu-3/bug_1393376.py [new file with mode: 0644]
verification_scripts/6.1-mu-3/bug_1486682.py [new file with mode: 0644]
verification_scripts/6.1-mu-3/bug_1487450.sh [new file with mode: 0644]

diff --git a/verification_scripts/6.1-mu-3/bug_1393376.py b/verification_scripts/6.1-mu-3/bug_1393376.py
new file mode 100644 (file)
index 0000000..aa2b691
--- /dev/null
@@ -0,0 +1,132 @@
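+"""Verification script for bug 1393376.
+
+Creates a Heat stack with one server and one port, updates it to a
+two-server template, and fails if any stack instance is left without a
+Neutron port. The endpoint URL and credentials below are hard-coded for
+the target environment.
+"""
+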
+import time
+import unittest
+
+import heatclient.v1.client
+import keystoneclient.v2_0.client
+import neutronclient.neutron.client
+import novaclient.client
+
+URL = 'http://192.168.0.2:5000/v2.0/'
+
+NAME = "test_1393376"
+
+TMPL1 = """
+heat_template_version: 2013-05-23
+
+description: Sample Stack
+
+resources:
+
+  instance-port:
+    type: OS::Neutron::Port
+    properties:
+      network_id: {net_id}
+
+  instance:
+    type: OS::Nova::Server
+    properties:
+      flavor: m1.micro
+      image: TestVM
+      networks:
+        - port : { get_resource : instance-port }
+"""
+
+TMPL2 = """
+heat_template_version: 2013-05-23
+
+description: Sample Stack
+
+resources:
+
+  instance-port:
+    type: OS::Neutron::Port
+    properties:
+      network_id: {net_id}
+
+  instance:
+    type: OS::Nova::Server
+    properties:
+      flavor: m1.micro
+      image: TestVM
+      networks:
+        - port : { get_resource : instance-port }
+
+  instance2-port:
+    type: OS::Neutron::Port
+    properties:
+      network_id: {net_id}
+
+  instance2:
+    type: OS::Nova::Server
+    properties:
+      flavor: m1.micro
+      image: TestVM
+      networks:
+        - port : { get_resource : instance2-port }
+"""
+
+
+class TestHeatBug1393376(unittest.TestCase):
+
+    @staticmethod
+    def wait_to_status(cli, stack_id, expected_status='CREATE_COMPLETE'):
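+        # Poll the stack status every 10 seconds, giving up after 5 minutes.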
+        timeout = 5 * 60
+        start = int(time.time())
+        status = cli.stacks.get(stack_id).stack_status
+        while status != expected_status:
+            if status in ["CREATE_FAILED", "UPDATE_FAILED"]:
+                raise StandardError("Heat stack has FAILED state.")
+            time.sleep(10)
+            status = cli.stacks.get(stack_id).stack_status
+            if int(time.time()) - start >= timeout:
+                raise RuntimeError(
+                    "Heat stack has {} state after 5 minutes, but expected "
+                    "status:{}".format(status, expected_status))
+
+    def setUp(self):
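+        # Build admin-authenticated Keystone, Neutron, Heat and Nova clients.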
+        keystone = keystoneclient.v2_0.client.Client(
+            username='admin', password='admin', tenant_name='admin',
+            auth_url=URL)
+        n_endpoint = keystone.service_catalog.url_for(
+            service_type='network', endpoint_type='publicURL')
+        self.neutron = neutronclient.neutron.client.Client(
+            '2.0', token=keystone.auth_token, endpoint_url=n_endpoint)
+        h_endpoint = keystone.service_catalog.url_for(
+            service_type='orchestration', endpoint_type='publicURL')
+        self.heat = heatclient.v1.client.Client(h_endpoint,
+                                                token=keystone.auth_token)
+        self.nova = novaclient.client.Client(
+            '2', 'admin', 'admin', 'admin', URL, service_type='compute',
+            no_cache=True)
+        private_nets = self.neutron.list_networks(
+            **{"router:external":False})['networks']
+        if not private_nets:
+            self.fail("Private network not found")
+        self.net = private_nets[0]['id']
+        self.stack = None
+
+    def tearDown(self):
+        if self.stack:
+            self.heat.stacks.delete(self.stack.id)
+
+    def test_1393376(self):
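+        # Create a stack with a single server from TMPL1 and wait for it.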
+        self.heat.stacks.create(stack_name=NAME,
+                                template=TMPL1.replace("{net_id}", self.net))
+        self.stack = [i for i in self.heat.stacks.list()
+                      if i.stack_name == NAME][0]
+        self.wait_to_status(self.heat, self.stack.id)
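+        # Update the stack to the two-server template and wait for the update.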
+        self.heat.stacks.update(self.stack.id,
+                                template=TMPL2.replace("{net_id}", self.net))
+        self.wait_to_status(self.heat, self.stack.id,
+                            expected_status="UPDATE_COMPLETE")
+        stack_instances = [i.id for i in self.nova.servers.list()
+                           if i.name.startswith(NAME)]
+        for instance in stack_instances:
+            if not self.neutron.list_ports(device_id=instance)['ports']:
+                self.fail("Neutron port for instance:{} not found. "
+                          "Bug #1393376 reproduced. FAIL.".format(instance))
+        print "Yeah!!! Bug #1393376 not reproduced. Fix works."
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/verification_scripts/6.1-mu-3/bug_1486682.py b/verification_scripts/6.1-mu-3/bug_1486682.py
new file mode 100644 (file)
index 0000000..699f1c9
--- /dev/null
@@ -0,0 +1,86 @@
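+"""Verification script for bug 1486682.
+
+Adds the hw_disk_bus and hw_vif_model properties to the TestVM Glance
+image, creates a Cinder volume from that image, and fails if the
+properties do not show up in the volume's image metadata.
+"""
+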
+import time
+import unittest
+
+import cinderclient.client
+import glanceclient.client
+import keystoneclient.v2_0.client
+
+
+PROP = {"hw_disk_bus": "ide",
+        "hw_vif_model": "rtl8139"}
+
+URL = 'http://192.168.0.2:5000/v2.0/'
+
+
+class TestCinderBug1486682(unittest.TestCase):
+
+    @staticmethod
+    def add_properties(cli, image_id):
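+        # Set each property both as an image tag and as an image attribute.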
+        for prop, val in PROP.iteritems():
+            cli.image_tags.update(image_id, prop)
+            tag = {prop: val}
+            cli.images.update(image_id, **tag)
+
+    @staticmethod
+    def delete_properties(cli, image_id):
+        list_props_to_delete = []
+        for prop, _ in PROP.iteritems():
+            list_props_to_delete.append(prop)
+            cli.image_tags.delete(image_id, prop)
+        cli.images.update(image_id, list_props_to_delete)
+
+    @staticmethod
+    def wait_to_status(cli, obj_id, expected_status='available'):
+        timeout = 5 * 60
+        start = int(time.time())
+        status = cli.get(obj_id).status.lower()
+        while status != expected_status:
+            if status in ["error"]:
+                raise StandardError("Object has error state.")
+            time.sleep(10)
+            status = cli.get(obj_id).status.lower()
+            if int(time.time()) - start >= timeout:
+                raise RuntimeError(
+                    "Object has {} state after 5 minutes, but expected "
+                    "status:{}".format(status, expected_status))
+
+    def setUp(self):
+        keystone = keystoneclient.v2_0.client.Client(
+            username='admin', password='admin', tenant_name='admin',
+            auth_url=URL)
+        endpoint = keystone.service_catalog.url_for(service_type='image',
+                                                    endpoint_type='publicURL')
+        self.glance = glanceclient.client.Client(
+            '2', endpoint, token=keystone.auth_token)
+        self.cinder = cinderclient.client.Client(
+            '1', 'admin', 'admin', 'admin', URL)
+        images = [image['id'] for image in self.glance.images.list()
+                  if image['name'] == "TestVM"]
+        if not images:
+            self.fail("image with name TestVM not found")
+        self.image = images[0]
+        self.vol = None
+        self.add_properties(self.glance, self.image)
+
+    def tearDown(self):
+        self.delete_properties(self.glance, self.image)
+        if self.vol:
+            self.cinder.volumes.force_delete(self.vol)
+
+    def test_1486682(self):
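+        # Create a 1 GB volume from the image and wait until it is available.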
+        self.vol = self.cinder.volumes.create(1, imageRef=self.image)
+        self.wait_to_status(self.cinder.volumes, self.vol.id)
+        print ("image dict")
+        print self.glance.images.get(self.image)
+        meta = self.cinder.volumes.get(self.vol.id).volume_image_metadata
+        print ("volume image meta")
+        print meta
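+        # The image properties must appear in the volume's image metadata.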
+        for tag, val in PROP.iteritems():
+            if not meta.get(tag) == val:
+                self.fail("Volume created from image not contains "
+                          "meta:{} with value:{}".format(tag, val))
+        print ("Yeah!!! Patch for bug:#1486682 works!!!")
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/verification_scripts/6.1-mu-3/bug_1487450.sh b/verification_scripts/6.1-mu-3/bug_1487450.sh
new file mode 100644 (file)
index 0000000..24f388c
--- /dev/null
@@ -0,0 +1,26 @@
+#!/bin/bash -ex
+. openrc
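+# Verification for bug 1487450: upload a secret object into a private
+# container, then use the account tempURL key to PUT a public manifest object
+# whose 'x-object-manifest' header points at the private object. If a tempURL
+# GET of that public object returns the secret content, the bug is reproduced.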
+CHECK_MESSAGE="password"
+echo ${CHECK_MESSAGE} > secret
+swift upload private secret
+swift post -H 'x-account-meta-temp-url-key: mykey'
+swift post public
+URL=$(echo ${OS_AUTH_URL} | sed -e "s|:5000/v2.0/||")
+TENANT_ID=$(keystone tenant-get $OS_USERNAME | awk '/id/ {print $4}')
+PUT_TEMPURL_SIG="$(swift tempurl PUT 60 /v1/AUTH_${TENANT_ID}/public/your-thing mykey)"
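+# The PUT creates 'public/your-thing' as a manifest pointing at 'private/secret'.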
+curl -i -XPUT ${URL}:8080${PUT_TEMPURL_SIG} -H'x-object-manifest: private/secret' -H'Content-Length: 0'
+GET_TEMPURL_SIG="$(swift tempurl GET 60 /v1/AUTH_${TENANT_ID}/public/your-thing mykey)"
+OUTPUT=$(curl -i ${URL}:8080${GET_TEMPURL_SIG} 2>&1 | tail -1)
+swift delete private &>/dev/null
+swift delete public &>/dev/null
+rm secret
+set +x
+
+echo -e "\n\n\n------- Check output -------\n\n\n"
+
+if [[ "${CHECK_MESSAGE}" == "${OUTPUT}" ]]; then
+    echo -e "Bug reproduced. exit 1\n\n\n-------"
+    exit 1
+else
+    echo -e "Bug not reproduced\n\n\n-------"
+fi
\ No newline at end of file