Add missing api example for incremental backups
* Added an incremental backup test and the resulting sample files.
* Fixed the example generator tests to actually fail when the api changes.
* Added the changes accumulated since the last example generation was run.
* Added a --fix-examples flag to the example generator to automatically
  correct the sample files.

Change-Id: I7ac355c80b251b0eccd3bd5b8d76d2287c255705
Closes-Bug: #1398119
Parent: 5f252e3bc5
Commit: 0a601ae45c
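For reference, the new backup_create_incremental samples added in the hunks below come from a python-troveclient call along the following lines (a minimal sketch assuming an already-authenticated v1 `client`; the instance and parent backup ids are the fake values used by the example generator):

# Sketch only: assumes `client` is an authenticated troveclient.v1 Client.
# The ids are the generator's fake values; substitute real ones in practice.
backup = client.backups.create(
    name='Incremental Snapshot',
    instance='44b277eb-39be-4921-be31-3d61b43651d7',
    parent_id='a9832168-7541-4536-b8d9-a8a9b79cf1b4',  # the backup to build on
    description='My Incremental Backup')
print(backup.status)  # "NEW" until the backup completes

Passing parent_id is the only difference from a plain backup create; the response sample below echoes it back along with the new backup's id. The hunks that follow are the new request/response sample files, then the generator and test changes that produce them.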
@@ -0,0 +1,7 @@
+POST /v1.0/1234/backups HTTP/1.1
+User-Agent: python-troveclient
+Host: troveapi.org
+X-Auth-Token: 87c6033c-9ff6-405f-943e-2deb73f278b7
+Accept: application/json
+Content-Type: application/json
+
@@ -0,0 +1,9 @@
+{
+    "backup": {
+        "description": "My Incremental Backup",
+        "instance": "44b277eb-39be-4921-be31-3d61b43651d7",
+        "name": "Incremental Snapshot",
+        "parent_id": "a9832168-7541-4536-b8d9-a8a9b79cf1b4"
+    }
+}
+
@@ -0,0 +1,5 @@
+HTTP/1.1 202 Accepted
+Content-Type: application/json
+Content-Length: 462
+Date: Mon, 18 Mar 2013 19:09:17 GMT
+
@@ -0,0 +1,20 @@
+{
+    "backup": {
+        "created": "2014-10-30T12:30:00",
+        "datastore": {
+            "type": "mysql",
+            "version": "5.5",
+            "version_id": "b00000b0-00b0-0b00-00b0-000b000000bb"
+        },
+        "description": "My Incremental Backup",
+        "id": "2e351a71-dd28-4bcb-a7d6-d36a5b487173",
+        "instance_id": "44b277eb-39be-4921-be31-3d61b43651d7",
+        "locationRef": null,
+        "name": "Incremental Snapshot",
+        "parent_id": "a9832168-7541-4536-b8d9-a8a9b79cf1b4",
+        "size": null,
+        "status": "NEW",
+        "updated": "2014-10-30T12:30:00"
+    }
+}
+
@@ -12,7 +12,7 @@
         "locationRef": "http://localhost/path/to/backup",
         "name": "snapshot",
         "parent_id": null,
-        "size": null,
+        "size": 0.14,
         "status": "COMPLETED",
         "updated": "2014-10-30T12:30:00"
     }
@@ -13,7 +13,7 @@
         "locationRef": "http://localhost/path/to/backup",
         "name": "snapshot",
         "parent_id": null,
-        "size": null,
+        "size": 0.14,
         "status": "COMPLETED",
         "updated": "2014-10-30T12:30:00"
     }
@@ -13,7 +13,7 @@
         "locationRef": "http://localhost/path/to/backup",
         "name": "snapshot",
         "parent_id": null,
-        "size": null,
+        "size": 0.14,
         "status": "COMPLETED",
         "updated": "2014-10-30T12:30:00"
     }
@@ -1,5 +1,5 @@
 HTTP/1.1 200 OK
 Content-Type: application/json
-Content-Length: 154
+Content-Length: 149
 Date: Mon, 18 Mar 2013 19:09:17 GMT

@@ -1,6 +1,6 @@
 {
     "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-    "min_size": "0",
+    "min": "0",
     "name": "collation_server",
     "restart_required": false,
     "type": "string"
@@ -1,5 +1,5 @@
 HTTP/1.1 200 OK
 Content-Type: application/json
-Content-Length: 154
+Content-Length: 149
 Date: Mon, 18 Mar 2013 19:09:17 GMT

@@ -1,6 +1,6 @@
 {
     "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-    "min_size": "0",
+    "min": "0",
     "name": "collation_server",
     "restart_required": false,
     "type": "string"
@@ -1,5 +1,5 @@
 HTTP/1.1 200 OK
 Content-Type: application/json
-Content-Length: 1085
+Content-Length: 1030
 Date: Mon, 18 Mar 2013 19:09:17 GMT

@@ -2,47 +2,47 @@
     "configuration-parameters": [
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "min_size": "0",
+            "min": "0",
             "name": "collation_server",
             "restart_required": false,
             "type": "string"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "65535",
-            "min_size": "0",
+            "max": "65535",
+            "min": "0",
             "name": "connect_timeout",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "57671680",
-            "min_size": "0",
+            "max": "57671680",
+            "min": "0",
             "name": "innodb_buffer_pool_size",
             "restart_required": true,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "4294967296",
-            "min_size": "0",
+            "max": "4294967296",
+            "min": "0",
             "name": "join_buffer_size",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "4294967296",
-            "min_size": "0",
+            "max": "4294967296",
+            "min": "0",
             "name": "key_buffer_size",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "1",
-            "min_size": "0",
+            "max": "1",
+            "min": "0",
             "name": "local_infile",
             "restart_required": false,
             "type": "integer"
@@ -1,5 +1,5 @@
 HTTP/1.1 200 OK
 Content-Type: application/json
-Content-Length: 1085
+Content-Length: 1030
 Date: Mon, 18 Mar 2013 19:09:17 GMT

@@ -2,47 +2,47 @@
     "configuration-parameters": [
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "min_size": "0",
+            "min": "0",
             "name": "collation_server",
             "restart_required": false,
             "type": "string"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "65535",
-            "min_size": "0",
+            "max": "65535",
+            "min": "0",
             "name": "connect_timeout",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "57671680",
-            "min_size": "0",
+            "max": "57671680",
+            "min": "0",
             "name": "innodb_buffer_pool_size",
             "restart_required": true,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "4294967296",
-            "min_size": "0",
+            "max": "4294967296",
+            "min": "0",
             "name": "join_buffer_size",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "4294967296",
-            "min_size": "0",
+            "max": "4294967296",
+            "min": "0",
             "name": "key_buffer_size",
             "restart_required": false,
             "type": "integer"
         },
         {
             "datastore_version_id": "b00000b0-00b0-0b00-00b0-000b000000bb",
-            "max_size": "1",
-            "min_size": "0",
+            "max": "1",
+            "min": "0",
             "name": "local_infile",
             "restart_required": false,
             "type": "integer"
@@ -6,7 +6,7 @@
     ],
     "links": [
         {
-            "href": "https://troveapi.org/v1.0/1234/instances/44b277eb-39be-4921-be31-3d61b43651d7/databases?marker=anotherdb&limit=1",
+            "href": "https://troveapi.org/v1.0/1234/instances/44b277eb-39be-4921-be31-3d61b43651d7/databases?limit=1&marker=anotherdb",
             "rel": "next"
         }
     ]
@@ -1,7 +1,7 @@
 {
     "links": [
         {
-            "href": "https://troveapi.org/v1.0/1234/instances/44b277eb-39be-4921-be31-3d61b43651d7/users?marker=dbuser2%2540%2525&limit=2",
+            "href": "https://troveapi.org/v1.0/1234/instances/44b277eb-39be-4921-be31-3d61b43651d7/users?limit=2&marker=dbuser2%2540%2525",
             "rel": "next"
         }
     ],
@@ -1,12 +1,12 @@
 {
     "hosts": [
         {
-            "instanceCount": 0,
-            "name": "hostname_2"
-        },
-        {
             "instanceCount": 1,
             "name": "hostname_1"
+        },
+        {
+            "instanceCount": 0,
+            "name": "hostname_2"
         }
     ]
 }
generate_examples.py | 31 lines changed (normal file → executable file)
@@ -1,4 +1,24 @@
+#!/usr/bin/env python
+
+# Copyright 2014 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+
 import run_tests
+
+import argparse
+import os
+import sys


 def import_tests():
@@ -7,4 +27,15 @@ def import_tests():


 if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Generate Example Snippets')
+    parser.add_argument('--fix-examples', action='store_true',
+                        help='Fix the examples rather than failing tests.')
+
+    args = parser.parse_args()
+    if args.fix_examples:
+        os.environ['TESTS_FIX_EXAMPLES'] = 'True'
+        # Remove the '--fix-examples' argument from sys.argv as it is not a
+        # valid argument in the run_tests module.
+        sys.argv.pop(sys.argv.index('--fix-examples'))
+
     run_tests.main(import_tests)
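With the new flag, regenerating the committed samples is simply `./generate_examples.py --fix-examples`. The sketch below shows the same thing driven from Python; only the TESTS_FIX_EXAMPLES variable matters, since the flag is stripped from sys.argv before run_tests sees it (assumes the script is run from the directory that contains generate_examples.py):

# Equivalent of "./generate_examples.py --fix-examples", from Python.
import os
import subprocess

env = dict(os.environ, TESTS_FIX_EXAMPLES='True')  # what --fix-examples sets
subprocess.check_call(['./generate_examples.py'], env=env)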
@@ -32,3 +32,4 @@ MySQL-python
 Babel>=1.3
 six>=1.7.0
 stevedore>=1.1.0 # Apache-2.0
+ordereddict
@@ -16,6 +16,11 @@
 import urllib
 import six.moves.urllib.parse as urlparse

+try:
+    from collections import OrderedDict
+except ImportError:
+    from ordereddict import OrderedDict
+

 def url_quote(s):
     if s is None:
@@ -88,7 +93,8 @@ class AppUrl(object):
         # then add &foo=bar to the URL.
         parsed_url = urlparse.urlparse(self.url)
         # Build a dictionary out of the query parameters in the URL
-        query_params = dict(urlparse.parse_qsl(parsed_url.query))
+        # with an OrderedDict to preserve the order of the URL.
+        query_params = OrderedDict(urlparse.parse_qsl(parsed_url.query))
         # Use kwargs to change or update any values in the query dict.
         query_params.update(kwargs)

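The switch to OrderedDict is what makes the regenerated link URLs earlier in this change deterministic: parse_qsl preserves the order of the parameters in the query string and an OrderedDict keeps that order, whereas a plain dict on Python 2 guarantees no particular iteration order. A small standalone illustration:

# Order-preserving parse of a query string (runs on Python 2 or 3).
from collections import OrderedDict
import six.moves.urllib.parse as urlparse

query = 'limit=2&marker=dbuser2%2540%2525'
params = OrderedDict(urlparse.parse_qsl(query))
print(list(params.items()))
# Always [('limit', '2'), ('marker', 'dbuser2%40%25')] -- the same order the
# query string was written in, so the generated sample URLs stop changing
# from one run to the next.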
@@ -166,32 +166,47 @@ class SnippetWriter(object):
                     file.write("%s\n" % line)

         def assert_output_matches():
-            # If this test is failing for you, comment out this next
             if os.path.isfile(filename):
                 with open(filename, 'r') as original_file:
                     original = original_file.read()
                     if empty:
                         fail('Error: output missing in new snippet generation '
                              'for %s. Old content follows:\n"""%s"""'
                              % (filename, original))
-                    expected = original.split('\n')
-                    # Remove the last item which will look like a duplicated
-                    # file ending newline
-                    expected.pop()
-                    diff = '\n'.join(goofy_diff(expected, actual))
-                    if diff:
-                        fail('Error: output files differ for %s:\n%s'
-                             % (filename, diff))
+                    elif filename.endswith('.json'):
+                        assert_json_matches(original)
+                    else:
+                        assert_file_matches(original)
             elif not empty:
                 fail('Error: new file necessary where there was no file '
                      'before. Filename=%s\nContent follows:\n"""%s"""'
                      % (filename, output))

-        # If this test is failing for you, comment out this line, generate
-        # the files, and then commit the changed files as part of your review.
-        #assert_output_matches()
+        def assert_file_matches(original):
+            expected = original.split('\n')
+            # Remove the last item which will look like a duplicated
+            # file ending newline
+            expected.pop()
+            diff = '\n'.join(goofy_diff(expected, actual))
+            if diff:
+                fail('Error: output files differ for %s:\n%s'
+                     % (filename, diff))

-        if not empty:
+        def assert_json_matches(original):
+            try:
+                expected = json.loads(original)
+                actual = json.loads(output)
+            except ValueError:
+                fail('Invalid json!\nExpected: %s\nActual: %s'
+                     % (original, output))
+
+            if expected != actual:
+                # Re-Use the same failure output if the json is different
+                assert_file_matches(original)
+
+        if not os.environ.get('TESTS_FIX_EXAMPLES'):
+            assert_output_matches()
+        elif not empty:
             write_actual_file()

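Because assert_json_matches compares parsed JSON rather than raw text, a regenerated sample only fails the test when the API output really changed, not when key order or formatting differs; only on a real mismatch does it fall back to assert_file_matches for a readable diff. A quick standard-library illustration:

# Semantically equal JSON compares equal regardless of key order/whitespace.
import json

committed = '{"backup": {"size": null, "status": "NEW"}}'
regenerated = '{\n    "backup": {"status": "NEW", "size": null}\n}'
assert json.loads(committed) == json.loads(regenerated)  # no failure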
@@ -15,6 +15,7 @@
 import json
 import time
 import logging
+import functools

 from proboscis import before_class
 from proboscis import test
@@ -37,6 +38,7 @@ trove_client._logger.setLevel(logging.CRITICAL)

 FAKE_INFO = {'m': 30, 's': 0, 'uuid': 'abcdef00-aaaa-aaaa-aaaa-bbbbbbbbbbbb'}
 EXAMPLE_BACKUP_ID = "a9832168-7541-4536-b8d9-a8a9b79cf1b4"
+EXAMPLE_BACKUP_INCREMENTAL_ID = "2e351a71-dd28-4bcb-a7d6-d36a5b487173"
 EXAMPLE_CONFIG_ID = "43a6ea86-e959-4735-9e46-a6a5d4a2d80f"
 EXAMPLE_INSTANCE_ID = "44b277eb-39be-4921-be31-3d61b43651d7"
 EXAMPLE_INSTANCE_ID_2 = "d5a9db64-7ef7-41c5-8e1e-4013166874bc"
@@ -821,18 +823,31 @@ class Backups(ActiveMixin):
     @test
     def create_backup(self):
         set_fake_stuff(uuid=EXAMPLE_BACKUP_ID)
-
-        def create_backup(client):
-            backup = client.backups.create(name='snapshot',
-                                           instance=json_instance.id,
-                                           description="My Backup")
-            with open("/tmp/mario", 'a') as f:
-                f.write("BACKUP = %s\n" % backup.id)
-            return backup
-
         results = self.snippet(
             "backup_create", "/backups", "POST", 202, "Accepted",
-            create_backup)
+            lambda client: client.backups.create(
+                name='snapshot',
+                instance=json_instance.id,
+                description="My Backup"
+            )
+        )
+        self._wait_for_active("BACKUP")
+        assert_equal(len(results), 1)
+        self.json_backup = results[JSON_INDEX]
+
+    @test
+    def create_incremental_backup(self):
+        set_fake_stuff(uuid=EXAMPLE_BACKUP_INCREMENTAL_ID)
+        results = self.snippet(
+            "backup_create_incremental", "/backups", "POST", 202, "Accepted",
+            lambda client: client.backups.create(
+                name='Incremental Snapshot',
+                instance=json_instance.id,
+                parent_id=EXAMPLE_BACKUP_ID,
+                description="My Incremental Backup"
+            )
+        )
+
         self._wait_for_active("BACKUP")
         assert_equal(len(results), 1)
         self.json_backup = results[JSON_INDEX]
@@ -1076,6 +1091,7 @@ class MgmtAccount(Example):


 def for_both(func):
+    @functools.wraps(func)
     def both(self):
         for result in self.results:
             func(self, result)
@@ -22,6 +22,11 @@ from trove.tests.fakes.common import authorize
 import eventlet
 import uuid

+try:
+    from collections import OrderedDict
+except ImportError:
+    from ordereddict import OrderedDict
+
 LOG = logging.getLogger(__name__)
 FAKE_HOSTS = ["fake_host_1", "fake_host_2"]

@@ -638,7 +643,9 @@ class FakeHost(object):
 class FakeHosts(object):

     def __init__(self, servers):
-        self.hosts = {}
+        # Use an ordered dict to make the results of the fake api call
+        # return in the same order for the example generator.
+        self.hosts = OrderedDict()
         for host in FAKE_HOSTS:
             self.add_host(FakeHost(host, servers))
