Retire repo
This repo was created by accident, use deb-python-os-collect-config instead. Needed-By: I1ac1a06931c8b6dd7c2e73620a0302c29e605f03 Change-Id: I81894aea69b9d09b0977039623c26781093a397a
This commit is contained in:
parent
c2bdb4f672
commit
0508d48fa0
@ -1,7 +0,0 @@
|
||||
[run]
|
||||
branch = True
|
||||
source = os_collect_config
|
||||
omit = os_collect_config/tests/*,os_collect_config/openstack/*
|
||||
|
||||
[report]
|
||||
ignore_errors = True
|
45
.gitignore
vendored
45
.gitignore
vendored
@ -1,45 +0,0 @@
|
||||
*.py[cod]
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Packages
|
||||
*.egg
|
||||
*.egg-info
|
||||
dist
|
||||
build
|
||||
eggs
|
||||
parts
|
||||
bin
|
||||
var
|
||||
sdist
|
||||
develop-eggs
|
||||
.installed.cfg
|
||||
lib
|
||||
lib64
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
.coverage
|
||||
cover
|
||||
.testrepository
|
||||
.tox
|
||||
nosetests.xml
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
|
||||
# Mr Developer
|
||||
.mr.developer.cfg
|
||||
.project
|
||||
.pydevproject
|
||||
|
||||
# OpenStack Generated Files
|
||||
AUTHORS
|
||||
ChangeLog
|
||||
|
||||
# Editors
|
||||
*~
|
||||
*.swp
|
@ -1,4 +0,0 @@
|
||||
[gerrit]
|
||||
host=review.openstack.org
|
||||
port=29418
|
||||
project=openstack/os-collect-config.git
|
@ -1,4 +0,0 @@
|
||||
[DEFAULT]
|
||||
test_command=${PYTHON:-python} -m subunit.run discover -t ./ . $LISTOPT $IDOPTION
|
||||
test_id_option=--load-list $IDFILE
|
||||
test_list_option=--list
|
176
LICENSE
176
LICENSE
@ -1,176 +0,0 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
@ -1,6 +0,0 @@
|
||||
include AUTHORS
|
||||
include ChangeLog
|
||||
exclude .gitignore
|
||||
exclude .gitreview
|
||||
|
||||
global-exclude *.pyc
|
65
README.rst
65
README.rst
@ -1,65 +0,0 @@
|
||||
=================
|
||||
os-collect-config
|
||||
=================
|
||||
|
||||
-------------------------------------------------
|
||||
Collect configuration from cloud metadata sources
|
||||
-------------------------------------------------
|
||||
|
||||
What does it do?
|
||||
================
|
||||
|
||||
It collects data from defined configuration sources and runs a defined
|
||||
hook whenever the metadata has been changed.
|
||||
|
||||
.. image:: os-collect-config-and-friends.svg
|
||||
|
||||
[#update_svg]_
|
||||
|
||||
Usage
|
||||
=====
|
||||
|
||||
You must define what sources to collect configuration data from in
|
||||
*/etc/os-collect-config.conf*.
|
||||
|
||||
The format of this file is::
|
||||
|
||||
[default]
|
||||
command=os-refresh-config
|
||||
|
||||
[cfn]
|
||||
metadata_url=http://192.0.2.99:8000/v1/
|
||||
access_key_id = ABCDEFGHIJLMNOP01234567890
|
||||
secret_access_key = 01234567890ABCDEFGHIJKLMNOP
|
||||
path = MyResource
|
||||
stack_name = my.stack
|
||||
|
||||
These sources will be polled and whenever any of them is changed,
|
||||
*default.command* will be run. A file will be written to the cache
|
||||
dir, os_config_files.json, which will be a json list of the file paths
|
||||
to the current copy of each metadata source. This list will also be
|
||||
set as a colon separated list in the environment variable
|
||||
*OS_CONFIG_FILES* for the command that is run. So in the example
|
||||
above, *os-refresh-config* would be executed with something like this
|
||||
in *OS_CONFIG_FILES*::
|
||||
|
||||
/var/lib/os-collect-config/ec2.json:/var/lib/os-collect-config/cfn.json
|
||||
|
||||
The previous version of the metadata from a source (if available) is present at $FILENAME.last.
|
||||
|
||||
When run without a command, the metadata sources are printed as a json document.
|
||||
|
||||
Quick Start
|
||||
===========
|
||||
|
||||
Install::
|
||||
|
||||
sudo pip install -U git+git://git.openstack.org/openstack/os-collect-config.git
|
||||
|
||||
Run it on an OpenStack instance with access to ec2 metadata::
|
||||
|
||||
os-collect-config
|
||||
|
||||
That should print out a json representation of the entire ec2 metadata tree.
|
||||
|
||||
.. [#update_svg] Recommend using LibreOffice draw to edit os-collect-config-and-friends.odg and regenerate the svg file. Alternatively edit the svg directly, but remove the .odg file if that is done.
|
13
README.txt
Normal file
13
README.txt
Normal file
@ -0,0 +1,13 @@
|
||||
This project is no longer maintained.
|
||||
|
||||
The contents of this repository are still available in the Git
|
||||
source code management system. To see the contents of this
|
||||
repository before it reached its end of life, please check out the
|
||||
previous commit with "git checkout HEAD^1".
|
||||
|
||||
Use instead the project deb-python-os-collect-config at
|
||||
http://git.openstack.org/cgit/openstack/deb-python-os-collect-config .
|
||||
|
||||
For any further questions, please email
|
||||
openstack-dev@lists.openstack.org or join #openstack-dev on
|
||||
Freenode.
|
Binary file not shown.
@ -1,246 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg version="1.2" width="279.4mm" height="215.9mm" viewBox="0 0 27940 21590" preserveAspectRatio="xMidYMid" fill-rule="evenodd" clip-path="url(#presentation_clip_path)" stroke-width="28.222" stroke-linejoin="round" xmlns="http://www.w3.org/2000/svg" xmlns:ooo="http://xml.openoffice.org/svg/export" xmlns:xlink="http://www.w3.org/1999/xlink" xml:space="preserve">
|
||||
<defs class="ClipPathGroup">
|
||||
<clipPath id="presentation_clip_path" clipPathUnits="userSpaceOnUse">
|
||||
<rect x="0" y="0" width="27940" height="21590"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
<defs class="TextShapeIndex">
|
||||
<g ooo:slide="id1" ooo:id-list="id3 id4 id5 id6 id7 id8 id9 id10 id11 id12 id13 id14 id15 id16 id17 id18 id19 id20 id21 id22 id23 id24 id25 id26"/>
|
||||
</defs>
|
||||
<defs class="EmbeddedBulletChars">
|
||||
<g id="bullet-char-template(57356)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 580,1141 L 1163,571 580,0 -4,571 580,1141 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(57354)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 8,1128 L 1137,1128 1137,0 8,0 8,1128 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(10146)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 174,0 L 602,739 174,1481 1456,739 174,0 Z M 1358,739 L 309,1346 659,739 1358,739 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(10132)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 2015,739 L 1276,0 717,0 1260,543 174,543 174,936 1260,936 717,1481 1274,1481 2015,739 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(10007)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 0,-2 C -7,14 -16,27 -25,37 L 356,567 C 262,823 215,952 215,954 215,979 228,992 255,992 264,992 276,990 289,987 310,991 331,999 354,1012 L 381,999 492,748 772,1049 836,1024 860,1049 C 881,1039 901,1025 922,1006 886,937 835,863 770,784 769,783 710,716 594,584 L 774,223 C 774,196 753,168 711,139 L 727,119 C 717,90 699,76 672,76 641,76 570,178 457,381 L 164,-76 C 142,-110 111,-127 72,-127 30,-127 9,-110 8,-76 1,-67 -2,-52 -2,-32 -2,-23 -1,-13 0,-2 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(10004)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 285,-33 C 182,-33 111,30 74,156 52,228 41,333 41,471 41,549 55,616 82,672 116,743 169,778 240,778 293,778 328,747 346,684 L 369,508 C 377,444 397,411 428,410 L 1163,1116 C 1174,1127 1196,1133 1229,1133 1271,1133 1292,1118 1292,1087 L 1292,965 C 1292,929 1282,901 1262,881 L 442,47 C 390,-6 338,-33 285,-33 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(9679)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 813,0 C 632,0 489,54 383,161 276,268 223,411 223,592 223,773 276,916 383,1023 489,1130 632,1184 813,1184 992,1184 1136,1130 1245,1023 1353,916 1407,772 1407,592 1407,412 1353,268 1245,161 1136,54 992,0 813,0 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(8226)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M 346,457 C 273,457 209,483 155,535 101,586 74,649 74,723 74,796 101,859 155,911 209,963 273,989 346,989 419,989 480,963 531,910 582,859 608,796 608,723 608,648 583,586 532,535 482,483 420,457 346,457 Z"/>
|
||||
</g>
|
||||
<g id="bullet-char-template(8211)" transform="scale(0.00048828125,-0.00048828125)">
|
||||
<path d="M -4,459 L 1135,459 1135,606 -4,606 -4,459 Z"/>
|
||||
</g>
|
||||
</defs>
|
||||
<defs class="TextEmbeddedBitmaps"/>
|
||||
<g>
|
||||
<g id="id2" class="Master_Slide">
|
||||
<g id="bg-id2" class="Background"/>
|
||||
<g id="bo-id2" class="BackgroundObjects"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="SlideGroup">
|
||||
<g>
|
||||
<g id="id1" class="Slide" clip-path="url(#presentation_clip_path)">
|
||||
<g class="Page">
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id3">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 6389,2777 C 5597,2777 4937,2890 4937,3027 L 4937,4532 C 4937,4669 5597,4783 6389,4783 7181,4783 7841,4669 7841,4532 L 7841,3027 C 7841,2890 7181,2777 6389,2777 L 6389,2777 Z M 4937,2777 L 4937,2777 Z M 7842,4783 L 7842,4783 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 6389,2777 C 5597,2777 4937,2890 4937,3027 L 4937,4532 C 4937,4669 5597,4783 6389,4783 7181,4783 7841,4669 7841,4532 L 7841,3027 C 7841,2890 7181,2777 6389,2777 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 4937,2777 L 4937,2777 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 7842,4783 L 7842,4783 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 6389,2777 C 5597,2777 4937,2890 4937,3027 4937,3164 5597,3278 6389,3278 7181,3278 7841,3164 7841,3027 7841,2890 7181,2777 6389,2777 L 6389,2777 Z M 4937,2777 L 4937,2777 Z M 7842,4783 L 7842,4783 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 6389,2777 C 5597,2777 4937,2890 4937,3027 4937,3164 5597,3278 6389,3278 7181,3278 7841,3164 7841,3027 7841,2890 7181,2777 6389,2777 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 4937,2777 L 4937,2777 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 7842,4783 L 7842,4783 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="5445" y="3812"><tspan fill="rgb(0,0,0)" stroke="none">Heat local</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="5502" y="4290"><tspan fill="rgb(0,0,0)" stroke="none">Metadata</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id4">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 13494,4784 L 11396,4784 11396,2778 15592,2778 15592,4784 13494,4784 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 13494,4784 L 11396,4784 11396,2778 15592,2778 15592,4784 13494,4784 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="12065" y="3688"><tspan fill="rgb(0,0,0)" stroke="none">EC2 meta-data</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="12823" y="4166"><tspan fill="rgb(0,0,0)" stroke="none">service</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id5">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 21889,4784 L 19791,4784 19791,2778 23987,2778 23987,4784 21889,4784 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21889,4784 L 19791,4784 19791,2778 23987,2778 23987,4784 21889,4784 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="20494" y="3688"><tspan fill="rgb(0,0,0)" stroke="none">Heat Metadata</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="21218" y="4166"><tspan fill="rgb(0,0,0)" stroke="none">service</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id6">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 13494,9193 L 11396,9193 11396,7188 15592,7188 15592,9193 13494,9193 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 13494,9193 L 11396,9193 11396,7188 15592,7188 15592,9193 13494,9193 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="11962" y="8337"><tspan fill="rgb(0,0,0)" stroke="none">os-collect-config</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id7">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 13499,12928 L 11401,12928 11401,10922 15597,10922 15597,12928 13499,12928 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 13499,12928 L 11401,12928 11401,10922 15597,10922 15597,12928 13499,12928 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="11908" y="11832"><tspan fill="rgb(0,0,0)" stroke="none">os-refresh-config</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="12566" y="12310"><tspan fill="rgb(0,0,0)" stroke="none">(+ scripts)</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id8">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 21645,10932 C 20852,10932 20192,11045 20192,11182 L 20192,12687 C 20192,12824 20852,12938 21645,12938 22437,12938 23098,12824 23098,12687 L 23098,11182 C 23098,11045 22437,10932 21645,10932 L 21645,10932 Z M 20192,10932 L 20192,10932 Z M 23098,12938 L 23098,12938 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21645,10932 C 20852,10932 20192,11045 20192,11182 L 20192,12687 C 20192,12824 20852,12938 21645,12938 22437,12938 23098,12824 23098,12687 L 23098,11182 C 23098,11045 22437,10932 21645,10932 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20192,10932 L 20192,10932 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 23098,12938 L 23098,12938 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 21645,10932 C 20852,10932 20192,11045 20192,11182 20192,11319 20852,11433 21645,11433 22437,11433 23098,11319 23098,11182 23098,11045 22437,10932 21645,10932 L 21645,10932 Z M 20192,10932 L 20192,10932 Z M 23098,12938 L 23098,12938 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21645,10932 C 20852,10932 20192,11045 20192,11182 20192,11319 20852,11433 21645,11433 22437,11433 23098,11319 23098,11182 23098,11045 22437,10932 21645,10932 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20192,10932 L 20192,10932 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 23098,12938 L 23098,12938 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="20191" y="11967"><tspan fill="rgb(0,0,0)" stroke="none">Local Metadata</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="21031" y="12445"><tspan fill="rgb(0,0,0)" stroke="none">Cache</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id9">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 13499,17136 L 11401,17136 11401,15130 15597,15130 15597,17136 13499,17136 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 13499,17136 L 11401,17136 11401,15130 15597,15130 15597,17136 13499,17136 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="12060" y="16279"><tspan fill="rgb(0,0,0)" stroke="none">os-apply-config</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id10">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 6389,15130 C 5597,15130 4937,15243 4937,15380 L 4937,16885 C 4937,17022 5597,17136 6389,17136 7181,17136 7841,17022 7841,16885 L 7841,15380 C 7841,15243 7181,15130 6389,15130 L 6389,15130 Z M 4937,15130 L 4937,15130 Z M 7842,17136 L 7842,17136 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 6389,15130 C 5597,15130 4937,15243 4937,15380 L 4937,16885 C 4937,17022 5597,17136 6389,17136 7181,17136 7841,17022 7841,16885 L 7841,15380 C 7841,15243 7181,15130 6389,15130 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 4937,15130 L 4937,15130 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 7842,17136 L 7842,17136 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 6389,15130 C 5597,15130 4937,15243 4937,15380 4937,15517 5597,15631 6389,15631 7181,15631 7841,15517 7841,15380 7841,15243 7181,15130 6389,15130 L 6389,15130 Z M 4937,15130 L 4937,15130 Z M 7842,17136 L 7842,17136 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 6389,15130 C 5597,15130 4937,15243 4937,15380 4937,15517 5597,15631 6389,15631 7181,15631 7841,15517 7841,15380 7841,15243 7181,15130 6389,15130 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 4937,15130 L 4937,15130 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 7842,17136 L 7842,17136 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="5564" y="16165"><tspan fill="rgb(0,0,0)" stroke="none">In-image</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="5468" y="16643"><tspan fill="rgb(0,0,0)" stroke="none">templates</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id11">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 6389,4783 L 6389,8191 10966,8191"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 11396,8191 L 10946,8041 10946,8341 11396,8191 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id12">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 13494,4784 L 13494,6759"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 13494,7189 L 13644,6739 13344,6739 13494,7189 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id13">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 21889,4784 L 21889,5986 13494,5986 13494,6759"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 13494,7189 L 13644,6739 13344,6739 13494,7189 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id14">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 15592,8191 L 21646,8191 21646,10503"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 21646,10933 L 21796,10483 21496,10483 21646,10933 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id15">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 13494,9194 L 13494,10058 13499,10058 13499,10492"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 13499,10922 L 13649,10472 13349,10472 13499,10922 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id16">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 13499,12928 L 13499,14700"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 13499,15130 L 13649,14680 13349,14680 13499,15130 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id17">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 7841,16134 L 10971,16133"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 11401,16133 L 10951,15983 10951,16283 11401,16133 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="Group">
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id18">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 19585,16420 C 18792,16420 18132,16533 18132,16670 L 18132,18175 C 18132,18312 18792,18426 19585,18426 20377,18426 21038,18312 21038,18175 L 21038,16670 C 21038,16533 20377,16420 19585,16420 L 19585,16420 Z M 18132,16420 L 18132,16420 Z M 21038,18426 L 21038,18426 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 19585,16420 C 18792,16420 18132,16533 18132,16670 L 18132,18175 C 18132,18312 18792,18426 19585,18426 20377,18426 21038,18312 21038,18175 L 21038,16670 C 21038,16533 20377,16420 19585,16420 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 18132,16420 L 18132,16420 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21038,18426 L 21038,18426 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 19585,16420 C 18792,16420 18132,16533 18132,16670 18132,16807 18792,16921 19585,16921 20377,16921 21038,16807 21038,16670 21038,16533 20377,16420 19585,16420 L 19585,16420 Z M 18132,16420 L 18132,16420 Z M 21038,18426 L 21038,18426 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 19585,16420 C 18792,16420 18132,16533 18132,16670 18132,16807 18792,16921 19585,16921 20377,16921 21038,16807 21038,16670 21038,16533 20377,16420 19585,16420 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 18132,16420 L 18132,16420 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21038,18426 L 21038,18426 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id19">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 20023,16662 C 19230,16662 18570,16775 18570,16912 L 18570,18417 C 18570,18554 19230,18668 20023,18668 20815,18668 21476,18554 21476,18417 L 21476,16912 C 21476,16775 20815,16662 20023,16662 L 20023,16662 Z M 18570,16662 L 18570,16662 Z M 21476,18668 L 21476,18668 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20023,16662 C 19230,16662 18570,16775 18570,16912 L 18570,18417 C 18570,18554 19230,18668 20023,18668 20815,18668 21476,18554 21476,18417 L 21476,16912 C 21476,16775 20815,16662 20023,16662 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 18570,16662 L 18570,16662 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21476,18668 L 21476,18668 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 20023,16662 C 19230,16662 18570,16775 18570,16912 18570,17049 19230,17163 20023,17163 20815,17163 21476,17049 21476,16912 21476,16775 20815,16662 20023,16662 L 20023,16662 Z M 18570,16662 L 18570,16662 Z M 21476,18668 L 21476,18668 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20023,16662 C 19230,16662 18570,16775 18570,16912 18570,17049 19230,17163 20023,17163 20815,17163 21476,17049 21476,16912 21476,16775 20815,16662 20023,16662 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 18570,16662 L 18570,16662 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21476,18668 L 21476,18668 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id20">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 20458,16901 C 19665,16901 19005,17014 19005,17151 L 19005,18656 C 19005,18793 19665,18907 20458,18907 21250,18907 21911,18793 21911,18656 L 21911,17151 C 21911,17014 21250,16901 20458,16901 L 20458,16901 Z M 19005,16901 L 19005,16901 Z M 21911,18907 L 21911,18907 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20458,16901 C 19665,16901 19005,17014 19005,17151 L 19005,18656 C 19005,18793 19665,18907 20458,18907 21250,18907 21911,18793 21911,18656 L 21911,17151 C 21911,17014 21250,16901 20458,16901 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 19005,16901 L 19005,16901 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21911,18907 L 21911,18907 Z"/>
|
||||
<path fill="rgb(240,240,240)" stroke="none" d="M 20458,16901 C 19665,16901 19005,17014 19005,17151 19005,17288 19665,17402 20458,17402 21250,17402 21911,17288 21911,17151 21911,17014 21250,16901 20458,16901 L 20458,16901 Z M 19005,16901 L 19005,16901 Z M 21911,18907 L 21911,18907 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 20458,16901 C 19665,16901 19005,17014 19005,17151 19005,17288 19665,17402 20458,17402 21250,17402 21911,17288 21911,17151 21911,17014 21250,16901 20458,16901 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 19005,16901 L 19005,16901 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 21911,18907 L 21911,18907 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.TextShape">
|
||||
<g id="id21">
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="19129" y="17920"><tspan fill="rgb(0,0,0)" stroke="none">Local Configs</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id22">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 13499,17136 L 13499,17664 17703,17664"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 18133,17664 L 17683,17514 17683,17814 18133,17664 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.CustomShape">
|
||||
<g id="id23">
|
||||
<path fill="rgb(230,230,230)" stroke="none" d="M 6654,12928 L 4556,12928 4556,10922 8752,10922 8752,12928 6654,12928 Z"/>
|
||||
<path fill="none" stroke="rgb(52,101,175)" d="M 6654,12928 L 4556,12928 4556,10922 8752,10922 8752,12928 6654,12928 Z"/>
|
||||
<text class="TextShape"><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="4921" y="11832"><tspan fill="rgb(0,0,0)" stroke="none">ephemeral system</tspan></tspan></tspan><tspan class="TextParagraph" font-family="Liberation Sans, sans-serif" font-size="423px" font-weight="400"><tspan class="TextPosition" x="6193" y="12310"><tspan fill="rgb(0,0,0)" stroke="none">state</tspan></tspan></tspan></text>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id24">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 11401,11925 L 9182,11925"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 8752,11925 L 9202,12075 9202,11775 8752,11925 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id25">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 21646,12938 L 15977,15932"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 15597,16133 L 16065,16055 15925,15790 15597,16133 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
<g class="com.sun.star.drawing.ConnectorShape">
|
||||
<g id="id26">
|
||||
<path fill="none" stroke="rgb(0,0,0)" d="M 20193,11936 L 16027,11926"/>
|
||||
<path fill="rgb(0,0,0)" stroke="none" d="M 15597,11925 L 16047,12076 16047,11776 15597,11925 Z"/>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</g>
|
||||
</svg>
|
Before Width: | Height: | Size: 22 KiB |
@ -1,85 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
"""Metadata cache.
|
||||
|
||||
Files within the cache as passed to hook commands invoked by
|
||||
os-collect-command.
|
||||
|
||||
The cache also stores the last version of a file in order to detect changes
|
||||
that occur - hook commands are only automatically invoked when one or more
|
||||
metadata sources have changed things.
|
||||
|
||||
The last version of a file is available under $FILENAME.last.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import tempfile
|
||||
|
||||
from oslo_config import cfg
|
||||
|
||||
|
||||
def get_path(name):
    """Return the cache file path for the named metadata source."""
    filename = '{0}.json'.format(name)
    return os.path.join(cfg.CONF.cachedir, filename)
|
||||
|
||||
|
||||
def store(name, content):
    """Write ``content`` to the cache as JSON and report whether it changed.

    The data is written to a temporary file and then renamed into place,
    so readers never observe a partially written cache file.  The first
    ever version of the data is preserved as ``<name>.json.orig``.

    :param name: cache key; data lands in ``<cachedir>/<name>.json``
    :param content: JSON-serializable data to store
    :returns: tuple ``(changed, dest_path)``; ``changed`` is True when the
        content differs from the last committed (``.last``) version, or
        when no committed version exists yet.
    """
    if not os.path.exists(cfg.CONF.cachedir):
        os.mkdir(cfg.CONF.cachedir)

    changed = False
    dest_path = get_path(name)
    orig_path = '%s.orig' % dest_path
    last_path = '%s.last' % dest_path

    with tempfile.NamedTemporaryFile(
            dir=cfg.CONF.cachedir,
            delete=False) as new:
        new.write(json.dumps(content, indent=1).encode('utf-8'))
        new.flush()
        # First write for this key: keep an untouched copy for reference
        # and treat the content as changed.
        if not os.path.exists(orig_path):
            shutil.copy(new.name, orig_path)
            changed = True
        # Atomic replace of the destination with the fully written file.
        os.rename(new.name, dest_path)

    if not changed:
        if os.path.exists(last_path):
            # Compare against the last committed version (see commit()).
            with open(last_path) as then:
                then_value = json.load(then)
                if then_value != content:
                    changed = True
        else:
            # Nothing has been committed yet, so this counts as a change.
            changed = True
    return (changed, dest_path)
|
||||
|
||||
|
||||
def commit(name):
    """Snapshot the current cache file for ``name`` as its .last version.

    store() compares new content against this snapshot to decide whether
    anything changed since the last successful run.
    """
    current = get_path(name)
    if os.path.exists(current):
        shutil.copy(current, current + '.last')
|
||||
|
||||
|
||||
def store_meta_list(name, data_keys):
    """Write a JSON list of the cache files that should exist after store.

    The list is written atomically: content goes to a temporary file in
    the destination's directory and is then renamed over the destination.

    :param name: cache key for the list file itself
    :param data_keys: cache keys whose paths make up the list
    :returns: path of the written list file
    """
    paths = []
    for key in data_keys:
        paths.append(get_path(key))
    destination = get_path(name)
    tmp = tempfile.NamedTemporaryFile(prefix='tmp_meta_list.',
                                      dir=os.path.dirname(destination),
                                      delete=False)
    with tmp:
        tmp.write(json.dumps(paths).encode('utf-8'))
    os.rename(tmp.name, destination)
    return destination
|
@ -1,143 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from keystoneclient.contrib.ec2 import utils as ec2_utils
|
||||
from lxml import etree
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
import six.moves.urllib.parse as urlparse
|
||||
|
||||
from os_collect_config import common
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import merger
|
||||
|
||||
CONF = cfg.CONF
|
||||
logger = log.getLogger(__name__)
|
||||
|
||||
opts = [
|
||||
cfg.StrOpt('metadata-url',
|
||||
help='URL to query for CloudFormation Metadata'),
|
||||
cfg.StrOpt('heat-metadata-hint',
|
||||
default='/var/lib/heat-cfntools/cfn-metadata-server',
|
||||
help='Local file to read for metadata url if not explicitly '
|
||||
' specified'),
|
||||
cfg.StrOpt('ca_certificate', help='CA Certificate path'),
|
||||
cfg.StrOpt('stack-name',
|
||||
help='Stack name to describe'),
|
||||
cfg.MultiStrOpt('path',
|
||||
help='Path to Metadata'),
|
||||
cfg.StrOpt('secret-access-key',
|
||||
help='Secret Access Key'),
|
||||
cfg.StrOpt('access-key-id',
|
||||
help='Access Key ID'),
|
||||
cfg.MultiStrOpt('deployment-key',
|
||||
default=['deployments'],
|
||||
help='DEPRECATED, use global configuration option '
|
||||
'"deployment-key"'),
|
||||
cfg.FloatOpt('timeout', default=10,
|
||||
help='Seconds to wait for the connection and read request'
|
||||
' timeout.')
|
||||
]
|
||||
name = 'cfn'
|
||||
|
||||
|
||||
class Collector(object):
    """Collects metadata via the CloudFormation DescribeStackResource API."""

    def __init__(self, requests_impl=common.requests):
        # requests_impl is injectable so tests can substitute a fake
        # requests module; a single Session is reused for all requests.
        self._requests_impl = requests_impl
        self._session = requests_impl.Session()

    def collect(self):
        """Fetch and merge metadata for every configured cfn path.

        :returns: list produced by merger.merged_list_from_content from
            the union of all fetched path values.
        :raises exc.CfnMetadataNotConfigured: when required options
            (metadata_url or hint file, access key, secret key, path) are
            missing or a path is malformed.
        :raises exc.CfnMetadataNotAvailable: when a request fails or a
            response cannot be parsed/navigated.
        """
        if CONF.cfn.metadata_url is None:
            # Fall back to the heat-cfntools hint file, which holds the
            # metadata server address; '/v1/' is appended to build the URL.
            if (CONF.cfn.heat_metadata_hint
                    and os.path.exists(CONF.cfn.heat_metadata_hint)):
                with open(CONF.cfn.heat_metadata_hint) as hint:
                    CONF.cfn.metadata_url = '%s/v1/' % hint.read().strip()
            else:
                logger.info('No metadata_url configured.')
                raise exc.CfnMetadataNotConfigured
        if CONF.cfn.access_key_id is None:
            logger.info('No Access Key ID configured.')
            raise exc.CfnMetadataNotConfigured
        if CONF.cfn.secret_access_key is None:
            logger.info('No Secret Access Key configured.')
            raise exc.CfnMetadataNotConfigured
        url = CONF.cfn.metadata_url
        stack_name = CONF.cfn.stack_name
        headers = {'Content-Type': 'application/json'}
        final_content = {}
        if CONF.cfn.path is None:
            logger.info('No path configured')
            raise exc.CfnMetadataNotConfigured

        signer = ec2_utils.Ec2Signer(secret_key=CONF.cfn.secret_access_key)
        for path in CONF.cfn.path:
            # Each path is "resource.field", optionally followed by a
            # dotted sub-path into the JSON value: resource.field[.x.y]
            if '.' not in path:
                logger.error('Path not in format resource.field[.x.y] (%s)' %
                             path)
                raise exc.CfnMetadataNotConfigured
            resource, field = path.split('.', 1)
            if '.' in field:
                field, sub_path = field.split('.', 1)
            else:
                sub_path = ''
            params = {'Action': 'DescribeStackResource',
                      'StackName': stack_name,
                      'LogicalResourceId': resource,
                      'AWSAccessKeyId': CONF.cfn.access_key_id,
                      'SignatureVersion': '2'}
            parsed_url = urlparse.urlparse(url)
            credentials = {'params': params,
                           'verb': 'GET',
                           'host': parsed_url.netloc,
                           'path': parsed_url.path}
            # Sign the query with EC2 signature version 2 before sending.
            params['Signature'] = signer.generate(credentials)
            try:
                content = self._session.get(
                    url, params=params, headers=headers,
                    verify=CONF.cfn.ca_certificate,
                    timeout=CONF.cfn.timeout)
                content.raise_for_status()
            except self._requests_impl.exceptions.RequestException as e:
                logger.warn(e)
                raise exc.CfnMetadataNotAvailable
            # The response is XML; pull the requested field out of
            # StackResourceDetail and parse its text as JSON.
            map_content = etree.fromstring(content.text)
            resource_detail = map_content.find(
                'DescribeStackResourceResult').find('StackResourceDetail')
            sub_element = resource_detail.find(field)
            if sub_element is None:
                logger.warn('Path %s does not exist.' % (path))
                raise exc.CfnMetadataNotAvailable
            try:
                value = json.loads(sub_element.text)
            except ValueError as e:
                logger.warn(
                    'Path %s failed to parse as json. (%s)' % (path, e))
                raise exc.CfnMetadataNotAvailable
            if sub_path:
                # Walk the remaining dotted keys down into the JSON value.
                for subkey in sub_path.split('.'):
                    try:
                        value = value[subkey]
                    except KeyError:
                        logger.warn(
                            'Sub-key %s does not exist. (%s)' % (subkey, path))
                        raise exc.CfnMetadataNotAvailable
            # Later paths overwrite earlier ones on key collisions.
            final_content.update(value)
        final_list = merger.merged_list_from_content(
            final_content, cfg.CONF.cfn.deployment_key, name)
        return final_list
|
@ -1,313 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import signal
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import cache
|
||||
from os_collect_config import cfn
|
||||
from os_collect_config import ec2
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import heat
|
||||
from os_collect_config import heat_local
|
||||
from os_collect_config import keystone
|
||||
from os_collect_config import local
|
||||
from os_collect_config import request
|
||||
from os_collect_config import version
|
||||
from os_collect_config import zaqar
|
||||
|
||||
DEFAULT_COLLECTORS = ['heat_local', 'ec2', 'cfn', 'heat', 'request', 'local',
|
||||
'zaqar']
|
||||
|
||||
opts = [
|
||||
cfg.StrOpt('command', short='c',
|
||||
help='Command to run on metadata changes. If specified,'
|
||||
' os-collect-config will continue to run until killed. If'
|
||||
' not specified, os-collect-config will print the'
|
||||
' collected data as a json map and exit.'),
|
||||
cfg.StrOpt('cachedir',
|
||||
default='/var/lib/os-collect-config',
|
||||
help='Directory in which to store local cache of metadata'),
|
||||
cfg.StrOpt('backup-cachedir',
|
||||
default='/var/run/os-collect-config',
|
||||
help='Copy cache contents to this directory as well.'),
|
||||
cfg.MultiStrOpt(
|
||||
'collectors',
|
||||
positional=True,
|
||||
default=DEFAULT_COLLECTORS,
|
||||
help='List the collectors to use. When command is specified the'
|
||||
'collections will be emitted in the order given by this option.'
|
||||
' (default: %s)' % ' '.join(DEFAULT_COLLECTORS)),
|
||||
cfg.BoolOpt('one-time',
|
||||
default=False,
|
||||
help='Pass this option to make os-collect-config exit after'
|
||||
' one execution of command. This behavior is implied if no'
|
||||
' command is specified.'),
|
||||
cfg.FloatOpt('polling-interval', short='i', default=30,
|
||||
help='When running continuously, pause a maximum of this'
|
||||
' many seconds between collecting data. If changes'
|
||||
' are detected shorter sleeps intervals are gradually'
|
||||
' increased to this maximum polling interval.'),
|
||||
cfg.BoolOpt('print-cachedir',
|
||||
default=False,
|
||||
help='Print out the value of cachedir and exit immediately.'),
|
||||
cfg.BoolOpt('force',
|
||||
default=False,
|
||||
help='Pass this to force running the command even if nothing'
|
||||
' has changed. Implies --one-time.'),
|
||||
cfg.BoolOpt('print', dest='print_only',
|
||||
default=False,
|
||||
help='Query normally, print the resulting configs as a json'
|
||||
' map, and exit immediately without running command if it is'
|
||||
' configured.'),
|
||||
cfg.MultiStrOpt('deployment-key',
|
||||
default=['deployments'],
|
||||
help='Key(s) to explode into multiple collected outputs. '
|
||||
'Parsed according to the expected Metadata created by '
|
||||
'OS::Heat::StructuredDeployment. Only Exploded if seen at '
|
||||
'the root of the Metadata.')
|
||||
]
|
||||
|
||||
CONF = cfg.CONF
|
||||
logger = log.getLogger('os-collect-config')
|
||||
|
||||
COLLECTORS = {ec2.name: ec2,
|
||||
cfn.name: cfn,
|
||||
heat.name: heat,
|
||||
heat_local.name: heat_local,
|
||||
local.name: local,
|
||||
request.name: request,
|
||||
zaqar.name: zaqar}
|
||||
|
||||
|
||||
def setup_conf():
    """Register all option groups and CLI options with oslo.config.

    Creates one option group per collector plus the shared keystone auth
    group, registers each collector module's opts into its group, then
    registers the global options and the oslo.log options.
    """
    ec2_group = cfg.OptGroup(name='ec2',
                             title='EC2 Metadata options')

    cfn_group = cfg.OptGroup(name='cfn',
                             title='CloudFormation API Metadata options')

    heat_local_group = cfg.OptGroup(name='heat_local',
                                    title='Heat Local Metadata options')

    local_group = cfg.OptGroup(name='local',
                               title='Local Metadata options')

    heat_group = cfg.OptGroup(name='heat',
                              title='Heat Metadata options')

    zaqar_group = cfg.OptGroup(name='zaqar',
                               title='Zaqar queue options')

    request_group = cfg.OptGroup(name='request',
                                 title='Request Metadata options')

    keystone_group = cfg.OptGroup(name='keystone',
                                  title='Keystone auth options')

    CONF.register_group(ec2_group)
    CONF.register_group(cfn_group)
    CONF.register_group(heat_local_group)
    CONF.register_group(local_group)
    CONF.register_group(heat_group)
    CONF.register_group(request_group)
    CONF.register_group(keystone_group)
    CONF.register_group(zaqar_group)
    CONF.register_cli_opts(ec2.opts, group='ec2')
    CONF.register_cli_opts(cfn.opts, group='cfn')
    CONF.register_cli_opts(heat_local.opts, group='heat_local')
    CONF.register_cli_opts(local.opts, group='local')
    CONF.register_cli_opts(heat.opts, group='heat')
    CONF.register_cli_opts(request.opts, group='request')
    CONF.register_cli_opts(keystone.opts, group='keystone')
    CONF.register_cli_opts(zaqar.opts, group='zaqar')

    # Global (ungrouped) CLI options and oslo.log's own options.
    CONF.register_cli_opts(opts)
    log.register_options(CONF)
|
||||
|
||||
|
||||
def collect_all(collectors, store=False, collector_kwargs_map=None):
    """Run each named collector and gather its output.

    :param collectors: iterable of collector names (keys of COLLECTORS).
    :param store: when True, write each collector's output into the cache
        and collect file paths; when False, merge the raw content.
    :param collector_kwargs_map: optional mapping of collector name to
        keyword arguments for that collector's constructor.
    :returns: tuple ``(changed_keys, paths_or_content)``:
        ``changed_keys`` is the set of cache keys whose content changed
        (always empty when ``store`` is False); ``paths_or_content`` is a
        list of cache file paths when ``store`` is True, otherwise a dict
        of the merged collected content.
    """
    changed_keys = set()
    all_keys = list()
    if store:
        paths_or_content = []
    else:
        paths_or_content = {}

    for collector in collectors:
        module = COLLECTORS[collector]
        if collector_kwargs_map and collector in collector_kwargs_map:
            collector_kwargs = collector_kwargs_map[collector]
        else:
            collector_kwargs = {}

        try:
            content = module.Collector(**collector_kwargs).collect()
        except exc.SourceNotAvailable:
            # Unreachable sources are skipped rather than fatal.
            logger.warn('Source [%s] Unavailable.' % collector)
            continue
        except exc.SourceNotConfigured:
            logger.debug('Source [%s] Not configured.' % collector)
            continue

        if store:
            # Collectors return (key, content) pairs when storing.
            for output_key, output_content in content:
                all_keys.append(output_key)
                (changed, path) = cache.store(output_key, output_content)
                if changed:
                    changed_keys.add(output_key)
                paths_or_content.append(path)
        else:
            paths_or_content.update(content)

    if changed_keys:
        # Record the full set of cache files and mirror the cache into the
        # backup directory, replacing any previous backup wholesale.
        cache.store_meta_list('os_config_files', all_keys)
        if os.path.exists(CONF.backup_cachedir):
            shutil.rmtree(CONF.backup_cachedir)
        if os.path.exists(CONF.cachedir):
            shutil.copytree(CONF.cachedir, CONF.backup_cachedir)
    return (changed_keys, paths_or_content)
|
||||
|
||||
|
||||
def reexec_self(signal=None, frame=None):
    """Replace the current process with a fresh copy of itself.

    Registered as the SIGHUP handler in __main__ (signal/frame are then
    supplied by the signal module) and also called directly after a
    successful cache commit.  Does not return.
    """
    if signal:
        logger.info('Signal received. Re-executing %s' % sys.argv)
    # Close all but stdin/stdout/stderr
    os.closerange(3, 255)
    os.execv(sys.argv[0], sys.argv)
|
||||
|
||||
|
||||
def call_command(files, command):
    """Run ``command`` in a shell with OS_CONFIG_FILES set in its env.

    :param files: list of cache file paths; joined with ':' into the
        OS_CONFIG_FILES environment variable for the hook to consume.
    :param command: shell command line to execute.
    :raises subprocess.CalledProcessError: if the command exits non-zero.
    """
    env = dict(os.environ)
    env["OS_CONFIG_FILES"] = ':'.join(files)
    logger.info("Executing %s with OS_CONFIG_FILES=%s" %
                (command, env["OS_CONFIG_FILES"]))
    # Fix: execute the ``command`` argument instead of re-reading
    # CONF.command.  The log line above already reports ``command``, and
    # using the parameter keeps the function correct for any caller, not
    # just the one that happens to pass CONF.command.
    subprocess.check_call(command, env=env, shell=True)
|
||||
|
||||
|
||||
def getfilehash(files):
    """Return the md5 hex digest of the concatenated contents of files.

    Unreadable or missing files are silently skipped, so the digest
    covers exactly the files that could be opened and read.

    :param files: a list of file names to be read
    :returns: string -- resulting md5sum
    """
    digest = hashlib.md5()
    for path in files:
        try:
            with open(path) as handle:
                digest.update(handle.read().encode('utf-8'))
        except IOError:
            # Best-effort: a vanished or unreadable file contributes
            # nothing to the hash.
            pass
    return digest.hexdigest()
|
||||
|
||||
|
||||
def __main__(args=sys.argv, collector_kwargs_map=None):
    """Entry point: collect metadata and optionally run the hook command.

    Without --command, collects once, prints the result as JSON and
    exits.  With --command, loops: collects, runs the command whenever
    anything changed, commits the cache on success, and re-execs itself
    (unless --one-time) so configuration changes take effect.

    :param args: argv-style argument list; args[0] is ignored.
    :param collector_kwargs_map: optional per-collector constructor
        kwargs, passed through to collect_all.
    :returns: exit status; non-zero is the last failing command's code.
    """
    # Re-exec on SIGHUP so an externally changed config file is re-read.
    signal.signal(signal.SIGHUP, reexec_self)
    setup_conf()
    CONF(args=args[1:], prog="os-collect-config",
         version=version.version_info.version_string())

    # This resets the logging infrastructure which prevents capturing log
    # output in tests cleanly, so should only be called if there isn't already
    # handlers defined i.e. not in unit tests
    if not log.getLogger(None).logger.handlers:
        log.setup(CONF, "os-collect-config")

    if CONF.print_cachedir:
        print(CONF.cachedir)
        return

    unknown_collectors = set(CONF.collectors) - set(COLLECTORS.keys())
    if unknown_collectors:
        raise exc.InvalidArguments(
            'Unknown collectors %s. Valid collectors are: %s' %
            (list(unknown_collectors), DEFAULT_COLLECTORS))

    if CONF.force:
        # --force implies a single run.
        CONF.set_override('one_time', True)

    exitval = 0
    config_files = CONF.config_file
    # Hash of our own config files; used to detect config edits made
    # while the hook command keeps failing.
    config_hash = getfilehash(config_files)
    sleep_time = 1
    while True:
        store_and_run = bool(CONF.command and not CONF.print_only)
        (changed_keys, content) = collect_all(
            cfg.CONF.collectors,
            store=store_and_run,
            collector_kwargs_map=collector_kwargs_map)
        if store_and_run:
            if changed_keys or CONF.force:
                # shorter sleeps while changes are detected allows for faster
                # software deployment dependency processing
                sleep_time = 1
                # ignore HUP now since we will reexec after commit anyway
                signal.signal(signal.SIGHUP, signal.SIG_IGN)
                try:
                    call_command(content, CONF.command)
                except subprocess.CalledProcessError as e:
                    exitval = e.returncode
                    logger.error('Command failed, will not cache new data. %s'
                                 % e)
                    if not CONF.one_time:
                        new_config_hash = getfilehash(config_files)
                        if config_hash == new_config_hash:
                            logger.warn(
                                'Sleeping %.2f seconds before re-exec.' %
                                sleep_time
                            )
                            time.sleep(sleep_time)
                        else:
                            # The command failed but the config file has
                            # changed re-exec now as the config file change
                            # may have fixed things.
                            logger.warn('Config changed, re-execing now')
                            config_hash = new_config_hash
                else:
                    # Command succeeded: commit the new cache versions so
                    # the next run compares against them, then re-exec.
                    for changed in changed_keys:
                        cache.commit(changed)
                    if not CONF.one_time:
                        reexec_self()
            else:
                logger.debug("No changes detected.")
            if CONF.one_time:
                break
            else:
                logger.info("Sleeping %.2f seconds.", sleep_time)
                time.sleep(sleep_time)

            # Exponential backoff, capped at the polling interval.
            sleep_time *= 2
            if sleep_time > CONF.polling_interval:
                sleep_time = CONF.polling_interval
        else:
            # No command (or --print): emit collected data once and stop.
            print(json.dumps(content, indent=1))
            break
    return exitval
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(__main__())
|
@ -1,3 +0,0 @@
|
||||
import requests
|
||||
|
||||
__all__ = ['requests']
|
@ -1,64 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import common
|
||||
from os_collect_config import exc
|
||||
|
||||
EC2_METADATA_URL = 'http://169.254.169.254/latest/meta-data'
|
||||
CONF = cfg.CONF
|
||||
|
||||
opts = [
|
||||
cfg.StrOpt('metadata-url',
|
||||
default=EC2_METADATA_URL,
|
||||
help='URL to query for EC2 Metadata'),
|
||||
cfg.FloatOpt('timeout', default=10,
|
||||
help='Seconds to wait for the connection and read request'
|
||||
' timeout.')
|
||||
]
|
||||
name = 'ec2'
|
||||
|
||||
|
||||
class Collector(object):
    """Collects the EC2 metadata tree over HTTP.

    The service exposes a directory-like hierarchy; URLs ending in '/'
    return a newline-separated listing of their children.
    """

    def __init__(self, requests_impl=common.requests):
        # requests_impl is injectable so tests can substitute a fake
        # requests module; one Session is reused for all requests.
        self._requests_impl = requests_impl
        self.session = requests_impl.Session()

    def _fetch_metadata(self, fetch_url, timeout):
        """Fetch one metadata URL, recursing into directory listings.

        :param fetch_url: URL to retrieve; a trailing '/' marks a listing.
        :param timeout: connection/read timeout in seconds.
        :returns: response text for leaf URLs, or a dict mapping each
            sub-key to its recursively fetched value for listings.
        :raises exc.Ec2MetadataNotAvailable: on any request failure.
        """
        try:
            r = self.session.get(fetch_url, timeout=timeout)
            r.raise_for_status()
        except self._requests_impl.exceptions.RequestException as e:
            log.getLogger(__name__).warn(e)
            raise exc.Ec2MetadataNotAvailable
        content = r.text
        if fetch_url[-1] == '/':
            # Directory listing: one entry per line.  Entries of the form
            # "name=value" are fetched as the sub-directory "name/".
            new_content = {}
            for subkey in content.split("\n"):
                if '=' in subkey:
                    subkey = subkey[:subkey.index('=')] + '/'
                sub_fetch_url = fetch_url + subkey
                if subkey[-1] == '/':
                    # Strip the trailing slash for use as the dict key.
                    subkey = subkey[:-1]
                new_content[subkey] = self._fetch_metadata(
                    sub_fetch_url, timeout)
            content = new_content
        return content

    def collect(self):
        """Return the full metadata tree as [('ec2', <tree>)]."""
        root_url = '%s/' % (CONF.ec2.metadata_url)
        return [('ec2', self._fetch_metadata(root_url, CONF.ec2.timeout))]
|
@ -1,70 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
# Exception hierarchy for the collectors.  Collectors raise a
# SourceNotAvailable subclass when a configured source cannot be reached,
# and a SourceNotConfigured subclass when required options are missing;
# collect_all() catches the two base classes and skips the source.


class SourceNotAvailable(RuntimeError):
    """The requested data source is unavailable."""


class SourceNotConfigured(RuntimeError):
    """The requested data source is not configured."""


class Ec2MetadataNotAvailable(SourceNotAvailable):
    """The EC2 metadata service is not available."""


class CfnMetadataNotAvailable(SourceNotAvailable):
    """The cfn metadata service is not available."""


class HeatMetadataNotAvailable(SourceNotAvailable):
    """The heat metadata service is not available."""


class CfnMetadataNotConfigured(SourceNotConfigured):
    """The cfn metadata service is not fully configured."""


class HeatMetadataNotConfigured(SourceNotConfigured):
    """The heat metadata service is not fully configured."""


class HeatLocalMetadataNotAvailable(SourceNotAvailable):
    """The local Heat metadata is not available."""


class LocalMetadataNotAvailable(SourceNotAvailable):
    """The local metadata is not available."""


class RequestMetadataNotAvailable(SourceNotAvailable):
    """The request metadata is not available."""


class RequestMetadataNotConfigured(SourceNotAvailable):
    """The request metadata is not fully configured."""


class ZaqarMetadataNotConfigured(SourceNotConfigured):
    """The zaqar metadata service is not fully configured."""


class ZaqarMetadataNotAvailable(SourceNotAvailable):
    """The Zaqar metadata is not available."""


class InvalidArguments(ValueError):
    """Invalid arguments."""
|
@ -1,92 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from heatclient import client as heatclient
|
||||
from keystoneclient.v3 import client as keystoneclient
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import keystone
|
||||
from os_collect_config import merger
|
||||
|
||||
CONF = cfg.CONF
|
||||
logger = log.getLogger(__name__)
|
||||
|
||||
opts = [
|
||||
cfg.StrOpt('user-id',
|
||||
help='User ID for API authentication'),
|
||||
cfg.StrOpt('password',
|
||||
help='Password for API authentication'),
|
||||
cfg.StrOpt('project-id',
|
||||
help='ID of project for API authentication'),
|
||||
cfg.StrOpt('auth-url',
|
||||
help='URL for API authentication'),
|
||||
cfg.StrOpt('stack-id',
|
||||
help='ID of the stack this deployment belongs to'),
|
||||
cfg.StrOpt('resource-name',
|
||||
help='Name of resource in the stack to be polled'),
|
||||
]
|
||||
name = 'heat'
|
||||
|
||||
|
||||
class Collector(object):
    """Collect deployment metadata for a stack resource from Heat.

    Authenticates against keystone, locates the orchestration endpoint
    from the service catalog, fetches the resource metadata and merges
    any software deployments into the returned list.
    """

    # Options in the [heat] group that must all be set before polling;
    # checked in the same order the original code checked them.
    _REQUIRED_OPTS = ('auth_url', 'password', 'project_id',
                      'user_id', 'stack_id', 'resource_name')

    def __init__(self,
                 keystoneclient=keystoneclient,
                 heatclient=heatclient):
        # Client modules are injectable so tests can pass fakes.
        self.keystoneclient = keystoneclient
        self.heatclient = heatclient

    def collect(self):
        """Return the merged metadata list for the configured resource.

        :raises exc.HeatMetadataNotConfigured: if any required option
            in the [heat] group is unset.
        :raises exc.HeatMetadataNotAvailable: if authentication or the
            metadata request fails for any reason.
        """
        # Fail fast on missing options; the original repeated this
        # three-line check once per option.
        for opt in self._REQUIRED_OPTS:
            if getattr(CONF.heat, opt) is None:
                logger.info('No %s configured.', opt)
                raise exc.HeatMetadataNotConfigured
        try:
            ks = keystone.Keystone(
                auth_url=CONF.heat.auth_url,
                user_id=CONF.heat.user_id,
                password=CONF.heat.password,
                project_id=CONF.heat.project_id,
                keystoneclient=self.keystoneclient).client
            endpoint = ks.service_catalog.url_for(
                service_type='orchestration', endpoint_type='publicURL')
            # Lazy %-args: the message is only built if debug is enabled.
            logger.debug('Fetching metadata from %s', endpoint)
            heat = self.heatclient.Client(
                '1', endpoint, token=ks.auth_token)
            r = heat.resources.metadata(CONF.heat.stack_id,
                                        CONF.heat.resource_name)

            # Promote os-apply-config / ungrouped deployments to
            # top-level entries alongside the raw content.
            return merger.merged_list_from_content(
                r, cfg.CONF.deployment_key, name)

        except Exception as e:
            # logger.warn is a deprecated alias of warning.
            logger.warning(str(e))
            raise exc.HeatMetadataNotAvailable
|
@ -1,58 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import exc
|
||||
|
||||
# Default location of boot-time metadata written by heat-cfntools.
HEAT_METADATA_PATH = ['/var/lib/heat-cfntools/cfn-init-data']
CONF = cfg.CONF

# Options registered under the [heat_local] group.
opts = [
    cfg.MultiStrOpt('path',
                    default=HEAT_METADATA_PATH,
                    help='Local path(s) to read for Metadata.')
]
name = 'heat_local'
logger = log.getLogger(__name__)
|
||||
|
||||
|
||||
class Collector(object):
    """Collect metadata from local JSON files written by heat-cfntools."""

    def __init__(self, requests_impl=None):
        # requests_impl is accepted only for interface parity with the
        # network-based collectors; no HTTP is performed here.
        pass

    def collect(self):
        """Merge every readable JSON file listed in [heat_local] path.

        Later files update keys from earlier ones; files with invalid
        JSON are skipped with a log message.

        :raises exc.HeatLocalMetadataNotAvailable: if no file yielded
            any content.
        """
        final_content = None
        for path in cfg.CONF.heat_local.path:
            if not os.path.exists(path):
                continue
            with open(path) as metadata:
                try:
                    # json.load reads the stream directly instead of
                    # buffering the whole file through .read().
                    value = json.load(metadata)
                except ValueError as e:
                    # Lazy %-args instead of eager string formatting.
                    logger.info('%s is not valid JSON (%s)', path, e)
                    continue
            if final_content:
                final_content.update(value)
            else:
                final_content = value
        if not final_content:
            logger.info('Local metadata not found (%s)',
                        cfg.CONF.heat_local.path)
            raise exc.HeatLocalMetadataNotAvailable
        return [('heat_local', final_content)]
|
@ -1,128 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import hashlib
|
||||
import os
|
||||
|
||||
from dogpile import cache
|
||||
from keystoneclient import discover as ks_discover
|
||||
from keystoneclient import exceptions as ks_exc
|
||||
from keystoneclient.v3 import client as ks_keystoneclient
|
||||
from oslo_config import cfg
|
||||
|
||||
CONF = cfg.CONF

# Options registered under the [keystone] group, used by the Keystone
# wrapper below to configure its on-disk auth-ref cache.
opts = [
    cfg.StrOpt('cache_dir',
               help='A directory to store keystone auth tokens.'),
    cfg.IntOpt('cache_ttl',
               default=1800,
               help='Seconds to store auth references in the cache'),
]
|
||||
|
||||
|
||||
class Keystone(object):
    """Wrapper around keystoneclient with an on-disk auth-ref cache.

    Encapsulates the keystone interactions os-collect-config needs and
    memoizes auth references in a dogpile DBM cache so subsequent
    invocations can skip re-authentication.
    """

    def __init__(self, auth_url, user_id, password, project_id,
                 keystoneclient=None):
        """Set up credentials, discover the v3 endpoint, open the cache.

        :param auth_url: keystone auth endpoint URL
        :param user_id: user id for authentication
        :param password: password for authentication
        :param project_id: project id for authentication
        :param keystoneclient: optional client implementation; defaults
            to keystoneclient.v3.
        """
        self.keystoneclient = keystoneclient or ks_keystoneclient
        self.user_id = user_id
        self.password = password
        self.project_id = project_id
        self._client = None
        try:
            # Ask the unversioned root which API versions exist and
            # prefer a discovered v3 endpoint when available.
            unversioned = auth_url.replace('/v2.0', '/')
            found = ks_discover.Discover(
                auth_url=unversioned).url_for('3.0')
            self.auth_url = found if found else auth_url
        except ks_exc.ClientException:
            # Discovery unreachable: assume v3 lives beside v2.0.
            self.auth_url = auth_url.replace('/v2.0', '/v3')
        if CONF.keystone.cache_dir:
            if not os.path.isdir(CONF.keystone.cache_dir):
                os.makedirs(CONF.keystone.cache_dir, mode=0o700)
            dbm_path = os.path.join(CONF.keystone.cache_dir, 'keystone.db')
            self.cache = cache.make_region().configure(
                'dogpile.cache.dbm',
                expiration_time=CONF.keystone.cache_ttl,
                arguments={"filename": dbm_path})
        else:
            self.cache = None

    def _make_key(self, key):
        # Namespace cache entries by endpoint + identity so changed
        # credentials never collide with stale entries.
        digest = hashlib.sha256()
        for part in (self.auth_url, self.user_id, self.project_id, key):
            digest.update(part.encode('utf-8'))
        return digest.hexdigest()

    @property
    def client(self):
        """Lazily built keystone client, seeded from a cached auth ref."""
        if not self._client:
            cached_ref = self._get_auth_ref_from_cache()
            if cached_ref:
                self._client = self.keystoneclient.Client(
                    auth_ref=cached_ref)
            else:
                self._client = self.keystoneclient.Client(
                    auth_url=self.auth_url,
                    user_id=self.user_id,
                    password=self.password,
                    project_id=self.project_id)
        return self._client

    def _get_auth_ref_from_cache(self):
        # Returns None when caching is disabled.
        if self.cache:
            return self.cache.get(self._make_key('auth_ref'))

    @property
    def auth_ref(self):
        """Current auth reference, fetched and cached on first access."""
        ref = self._get_auth_ref_from_cache()
        if not ref:
            ref = self.client.get_auth_ref()
            if self.cache:
                self.cache.set(self._make_key('auth_ref'), ref)
        return ref

    def invalidate_auth_ref(self):
        """Drop the cached auth reference, if any."""
        if self.cache:
            return self.cache.delete(self._make_key('auth_ref'))

    @property
    def service_catalog(self):
        """Service catalog; retries once after invalidating a stale ref."""
        try:
            return self.client.service_catalog
        except ks_exc.AuthorizationFailure:
            self.invalidate_auth_ref()
            return self.client.service_catalog
|
@ -1,102 +0,0 @@
|
||||
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import locale
|
||||
import os
|
||||
import stat
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import exc
|
||||
|
||||
# Default directory scanned for JSON metadata files.
LOCAL_DEFAULT_PATHS = ['/var/lib/os-collect-config/local-data']
CONF = cfg.CONF

# Options registered under the [local] group.
opts = [
    cfg.MultiStrOpt('path',
                    default=LOCAL_DEFAULT_PATHS,
                    help='Local directory to scan for Metadata files.')
]
name = 'local'
logger = log.getLogger(__name__)
|
||||
|
||||
|
||||
def _dest_looks_insecure(local_path):
    """Return True if local_path has insecure ownership or permissions.

    The path must be owned by the current user and must not be world
    writable.  Group write is deliberately allowed so the owner can let
    a trusted group write metadata.
    """
    looks_insecure = False
    st = os.stat(local_path)
    # st.st_uid instead of the legacy st[stat.ST_UID] indexing, matching
    # the attribute access (st.st_mode) already used below.
    if st.st_uid != os.getuid():
        logger.error('%s is owned by another user. This is a'
                     ' security risk.', local_path)
        looks_insecure = True
    if st.st_mode & stat.S_IWOTH:
        logger.error('%s is world writable. This is a security risk.',
                     local_path)
        looks_insecure = True
    return looks_insecure
|
||||
|
||||
|
||||
class Collector(object):
    """Collect metadata from JSON files in the configured local dirs."""

    def __init__(self, requests_impl=None):
        # Accepted only for interface parity with network collectors.
        pass

    def collect(self):
        """Return (basename, content) pairs sorted under the C locale.

        Each regular, non-hidden file in every [local] path directory is
        parsed as JSON.

        :raises exc.LocalMetadataNotAvailable: on an empty path list,
            insecure directory, world-writable file, or invalid JSON.
        """
        if len(cfg.CONF.local.path) == 0:
            raise exc.LocalMetadataNotAvailable
        final_content = []
        for local_path in cfg.CONF.local.path:
            try:
                os.stat(local_path)
            except OSError:
                # warn() is a deprecated alias of warning().
                logger.warning("%s not found. Skipping", local_path)
                continue
            if _dest_looks_insecure(local_path):
                raise exc.LocalMetadataNotAvailable
            for data_file in os.listdir(local_path):
                if data_file.startswith('.'):
                    continue
                data_file = os.path.join(local_path, data_file)
                if os.path.isdir(data_file):
                    continue
                st = os.stat(data_file)
                if st.st_mode & stat.S_IWOTH:
                    logger.error(
                        '%s is world writable. This is a security risk.',
                        data_file)
                    raise exc.LocalMetadataNotAvailable
                with open(data_file) as metadata:
                    try:
                        value = json.load(metadata)
                    except ValueError as e:
                        logger.error(
                            '%s is not valid JSON (%s)', data_file, e)
                        raise exc.LocalMetadataNotAvailable
                final_content.append(
                    (os.path.basename(data_file), value))
        if not final_content:
            logger.warning('No local metadata found (%s)',
                           cfg.CONF.local.path)

        # Sort under the C locale so ordering is stable regardless of
        # the process environment.
        def locale_aware_by_first_item(data):
            return locale.strxfrm(data[0])

        # BUG FIX: the original saved locale.getdefaultlocale() — the
        # *environment* default, not the currently active locale — and
        # restored it without try/finally, which could both clobber an
        # explicitly-set process locale and leave the process stuck in
        # 'C' if sorting raised.  Query the current setting and restore
        # it unconditionally.
        save_locale = locale.setlocale(locale.LC_ALL)
        locale.setlocale(locale.LC_ALL, 'C')
        try:
            return sorted(final_content, key=locale_aware_by_first_item)
        finally:
            locale.setlocale(locale.LC_ALL, save_locale)
|
@ -1,45 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from oslo_log import log
|
||||
|
||||
|
||||
logger = log.getLogger(__name__)
|
||||
|
||||
|
||||
def merged_list_from_content(final_content, deployment_keys, collector_name):
    """Flatten deployment configs out of collected metadata content.

    :param final_content: dict of metadata collected from one source.
    :param deployment_keys: iterable of keys whose values may hold
        lists of deployment dicts.
    :param collector_name: name of the collector; paired with
        final_content as the first entry of the returned list.
    :returns: list of (name, config) tuples.  Deployments whose group
        is 'os-apply-config' or 'Heat::Ungrouped' (the default when no
        group is given) are appended after the head entry.
    """
    final_list = []
    for depkey in deployment_keys:
        if depkey not in final_content:
            continue
        deployments = final_content[depkey]
        if not isinstance(deployments, list):
            # warn() is a deprecated alias; use warning() with lazy
            # %-args instead of eager string formatting.
            logger.warning(
                'Deployment-key %s was found but does not contain a '
                'list.', depkey)
            continue
        logger.debug('Deployment found for %s', depkey)
        for deployment in deployments:
            if 'name' not in deployment:
                logger.warning(
                    'No name found for a deployment under %s.', depkey)
                continue
            if deployment.get('group', 'Heat::Ungrouped') in (
                    'os-apply-config', 'Heat::Ungrouped'):
                final_list.append((deployment['name'],
                                   deployment['config']))
    final_list.insert(0, (collector_name, final_content))
    return final_list
|
@ -1,100 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import calendar
|
||||
import json
|
||||
import time
|
||||
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
|
||||
from os_collect_config import common
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import merger
|
||||
|
||||
CONF = cfg.CONF
logger = log.getLogger(__name__)

# Options registered under the [request] group.
opts = [
    cfg.StrOpt('metadata-url',
               help='URL to query for metadata'),
    cfg.FloatOpt('timeout', default=10,
                 help='Seconds to wait for the connection and read request'
                      ' timeout.')
]
name = 'request'
|
||||
|
||||
|
||||
class Collector(object):
    """Fetch metadata over HTTP, skipping unchanged content.

    A HEAD request is issued first; when the Last-Modified header shows
    the content has not changed since the previous collection, the
    fetch is skipped by raising RequestMetadataNotAvailable.
    """

    def __init__(self, requests_impl=common.requests):
        # requests_impl is injectable so tests can supply fakes.
        self._requests_impl = requests_impl
        self._session = requests_impl.Session()
        # Unix timestamp from the Last-Modified header of the most
        # recent successful collection; None before the first one.
        self.last_modified = None

    def check_fetch_content(self, headers):
        """Return the parsed Last-Modified time when a fetch is needed.

        Returns None when no Last-Modified header is present (always
        fetch in that case).

        :raises exc.RequestMetadataNotAvailable: when the content has
            not been modified since the last collection.
        """
        lm = headers.get('last-modified')
        if not lm:
            return

        last_modified = calendar.timegm(
            time.strptime(lm, '%a, %d %b %Y %H:%M:%S %Z'))

        # First run: fetch unconditionally.
        if not self.last_modified:
            return last_modified

        if last_modified < self.last_modified:
            # warn() is a deprecated alias of warning().
            logger.warning(
                'Last-Modified is older than previous collection')

        if last_modified <= self.last_modified:
            raise exc.RequestMetadataNotAvailable
        return last_modified

    def collect(self):
        """Fetch, parse and merge the metadata at [request] metadata-url.

        :raises exc.RequestMetadataNotConfigured: if no URL is set.
        :raises exc.RequestMetadataNotAvailable: on HTTP failure,
            invalid JSON, or unchanged content.
        """
        if CONF.request.metadata_url is None:
            logger.info('No metadata_url configured.')
            raise exc.RequestMetadataNotConfigured
        url = CONF.request.metadata_url
        timeout = CONF.request.timeout
        final_content = {}

        try:
            head = self._session.head(url, timeout=timeout)
            last_modified = self.check_fetch_content(head.headers)

            content = self._session.get(url, timeout=timeout)
            content.raise_for_status()
            # Only record the timestamp after a successful fetch.
            self.last_modified = last_modified

        except self._requests_impl.exceptions.RequestException as e:
            logger.warning(e)
            raise exc.RequestMetadataNotAvailable
        try:
            value = json.loads(content.text)
        except ValueError as e:
            # Lazy %-args instead of eager string formatting.
            logger.warning('Failed to parse as json. (%s)', e)
            raise exc.RequestMetadataNotAvailable
        final_content.update(value)

        final_list = merger.merged_list_from_content(
            final_content, cfg.CONF.deployment_key, name)
        return final_list
|
@ -1,102 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import fixtures
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import cache
|
||||
|
||||
|
||||
class DummyConf(object):
    """Stand-in for the cfg module used by os_collect_config.cache.

    Exposes only .CONF.cachedir, which is all the cache module reads.
    """

    def __init__(self, cachedir):
        # Build the CONF holder dynamically rather than via a nested
        # class definition; attribute access behaves identically.
        self.CONF = type('CONFobj', (object,), {'cachedir': cachedir})()
|
||||
|
||||
|
||||
class TestCache(testtools.TestCase):
    """Exercises store()/commit() change detection in the cache module."""

    def setUp(self):
        super(TestCache, self).setUp()
        cache_root = self.useFixture(fixtures.TempDir())
        self.cache_dir = os.path.join(cache_root.path, 'cache')
        self.useFixture(fixtures.MonkeyPatch('os_collect_config.cache.cfg',
                                             DummyConf(self.cache_dir)))

    def tearDown(self):
        super(TestCache, self).tearDown()

    def test_cache(self):
        # A never-seen name must register as changed.
        (changed, path) = cache.store('foo', {'a': 1})
        self.assertTrue(changed)
        self.assertTrue(os.path.exists(self.cache_dir))
        self.assertTrue(os.path.exists(path))
        orig_path = '%s.orig' % path
        self.assertTrue(os.path.exists(orig_path))
        last_path = '%s.last' % path
        self.assertFalse(os.path.exists(last_path))

        # .orig exists but .last does not, so store() shortcuts to
        # "changed".
        (changed, path) = cache.store('foo', {'a': 2})
        self.assertTrue(changed)
        orig_path = '%s.orig' % path
        with open(path) as now:
            with open(orig_path) as then:
                self.assertNotEqual(now.read(), then.read())

        # commit() snapshots the current copy as .last.
        cache.commit('foo')
        last_path = '%s.last' % path
        self.assertTrue(os.path.exists(last_path))

        # Identical content after a commit: no change reported.
        (changed, path) = cache.store('foo', {'a': 2})
        self.assertFalse(changed)

        cache.commit('foo')
        # With .last present, the full comparison path runs.
        (changed, path) = cache.store('foo', {'a': 3})
        self.assertTrue(changed)
        self.assertTrue(os.path.exists(path))

        # The meta list records every stored path.
        list_path = cache.store_meta_list('foo_list', ['foo'])
        self.assertTrue(os.path.exists(list_path))
        with open(list_path) as list_file:
            list_list = json.loads(list_file.read())
        self.assertThat(list_list, matchers.IsInstance(list))
        self.assertIn(path, list_list)

    def test_cache_ignores_json_inequality(self):
        # Key order in the serialized form must not matter once parsed.
        content1 = u'{"a": "value-a", "b": "value-b"}'
        content2 = u'{"b": "value-b", "a": "value-a"}'
        value1 = json.loads(content1)
        value2 = json.loads(content2)
        self.assertEqual(value1, value2)
        (changed, path) = cache.store('content', value1)
        self.assertTrue(changed)
        cache.commit('content')
        (changed, path) = cache.store('content', value1)
        self.assertFalse(changed)
        (changed, path) = cache.store('content', value2)
        self.assertFalse(changed)

    def test_commit_no_cache(self):
        # Committing a name that was never stored is a harmless no-op.
        self.assertIsNone(cache.commit('neversaved'))
|
@ -1,308 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from lxml import etree
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import testtools
|
||||
from testtools import content as test_content
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import cfn
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
|
||||
|
||||
# Plain metadata returned by the basic fake CFN endpoint.
META_DATA = {
    u'int1': 1,
    u'strfoo': u'foo',
    u'map_ab': {u'a': 'apple', u'b': 'banana'},
}


# Metadata carrying software-config deployments: one ungrouped, one
# os-apply-config, one with no group (defaults to Heat::Ungrouped) and
# one in a group the collector must ignore.
SOFTWARE_CONFIG_DATA = {
    u'old-style': u'value',
    u'deployments': [
        {
            u'inputs': [
                {u'type': u'String', u'name': u'input1',
                 u'value': u'value1'},
            ],
            u'group': 'Heat::Ungrouped',
            u'name': 'dep-name1',
            u'outputs': None,
            u'options': None,
            u'config': {u'config1': 'value1'},
        },
        {
            u'inputs': [
                {u'type': u'String', u'name': u'input1',
                 u'value': u'value1'},
            ],
            u'group': 'os-apply-config',
            u'name': 'dep-name2',
            u'outputs': None,
            u'options': None,
            u'config': {u'config2': 'value2'},
        },
        {
            u'inputs': [
                {u'type': u'String', u'name': u'input1',
                 u'value': u'value1'},
            ],
            u'name': 'dep-name3',
            u'outputs': None,
            u'options': None,
            u'config': {u'config3': 'value3'},
        },
        {
            u'inputs': [],
            u'group': 'ignore_me',
            u'name': 'ignore_me_name',
            u'outputs': None,
            u'options': None,
            u'config': 'ignore_me_config',
        },
    ],
}


# Malformed variant: 'deployments' is a dict, not a list.
SOFTWARE_CONFIG_IMPOSTER_DATA = {
    u'old-style': u'value',
    u'deployments': {u'not': u'a list'},
}
|
||||
|
||||
|
||||
class FakeResponse(dict):
    """Minimal stand-in for requests.Response carrying only .text."""

    def __init__(self, text):
        self.text = text

    def raise_for_status(self):
        # Fake responses always represent success.
        pass
|
||||
|
||||
|
||||
class FakeReqSession(object):
    """Fake requests session validating CFN DescribeStackResource calls.

    Asserts on the request shape, then returns a canned XML response
    embedding SESSION_META_DATA as the resource metadata.
    """

    SESSION_META_DATA = META_DATA

    def __init__(self, testcase, expected_netloc):
        self._test = testcase
        self._expected_netloc = expected_netloc
        # Flipped to True when the caller passes a verify argument.
        self.verify = False

    def get(self, url, params, headers, verify=None, timeout=None):
        self._test.addDetail('url', test_content.text_content(url))
        parsed = urlparse.urlparse(url)
        self._test.assertEqual(self._expected_netloc, parsed.netloc)
        self._test.assertEqual('/v1/', parsed.path)
        self._test.assertEqual('application/json',
                               headers['Content-Type'])
        self._test.assertIn('SignatureVersion', params)
        self._test.assertEqual('2', params['SignatureVersion'])
        self._test.assertIn('Signature', params)
        self._test.assertIn('Action', params)
        self._test.assertEqual('DescribeStackResource',
                               params['Action'])
        self._test.assertIn('LogicalResourceId', params)
        self._test.assertEqual('foo', params['LogicalResourceId'])
        self._test.assertEqual(10, timeout)
        # Build the DescribeStackResource XML envelope.
        root = etree.Element('DescribeStackResourceResponse')
        result = etree.SubElement(root, 'DescribeStackResourceResult')
        detail = etree.SubElement(result, 'StackResourceDetail')
        metadata = etree.SubElement(detail, 'Metadata')
        metadata.text = json.dumps(self.SESSION_META_DATA)
        if verify is not None:
            self.verify = True
        return FakeResponse(etree.tostring(root))
|
||||
|
||||
|
||||
class FakeRequests(object):
    """Factory object mimicking the requests module for the collector."""

    exceptions = requests.exceptions

    def __init__(self, testcase, expected_netloc='127.0.0.1:8000'):
        self._test = testcase
        self._expected_netloc = expected_netloc

    def Session(self):
        # Each session validates requests against the expected netloc.
        return FakeReqSession(self._test, self._expected_netloc)
|
||||
|
||||
|
||||
class FakeReqSessionSoftwareConfig(FakeReqSession):
    """Session variant returning software-config style metadata."""

    SESSION_META_DATA = SOFTWARE_CONFIG_DATA


class FakeRequestsSoftwareConfig(FakeRequests):
    """Factory producing software-config sessions.

    The session class is held in FAKE_SESSION so subclasses only need
    to override that attribute.
    """

    FAKE_SESSION = FakeReqSessionSoftwareConfig

    def Session(self):
        return self.FAKE_SESSION(self._test, self._expected_netloc)


class FakeReqSessionConfigImposter(FakeReqSession):
    """Session variant whose 'deployments' entry is not a list."""

    SESSION_META_DATA = SOFTWARE_CONFIG_IMPOSTER_DATA


class FakeRequestsConfigImposter(FakeRequestsSoftwareConfig):
    """Factory producing imposter (non-list deployments) sessions."""

    FAKE_SESSION = FakeReqSessionConfigImposter
|
||||
|
||||
|
||||
class FakeFailRequests(object):
    """Requests stand-in whose sessions always raise 403 Forbidden."""

    exceptions = requests.exceptions

    class Session(object):
        def get(self, url, params, headers, verify=None, timeout=None):
            raise requests.exceptions.HTTPError(403, 'Forbidden')
|
||||
|
||||
|
||||
class TestCfnBase(testtools.TestCase):
    """Common fixture: metadata hint file plus default [cfn] options."""

    def setUp(self):
        super(TestCfnBase, self).setUp()
        self.log = self.useFixture(fixtures.FakeLogger())
        self.useFixture(fixtures.NestedTempfile())
        # Hint file pointing the collector at the fake endpoint.
        self.hint_file = tempfile.NamedTemporaryFile()
        self.hint_file.write(u'http://127.0.0.1:8000'.encode('utf-8'))
        self.hint_file.flush()
        self.addCleanup(self.hint_file.close)
        collect.setup_conf()
        cfg.CONF.cfn.heat_metadata_hint = self.hint_file.name
        cfg.CONF.cfn.metadata_url = None
        cfg.CONF.cfn.path = ['foo.Metadata']
        cfg.CONF.cfn.access_key_id = '0123456789ABCDEF'
        cfg.CONF.cfn.secret_access_key = 'FEDCBA9876543210'
|
||||
|
||||
|
||||
class TestCfn(TestCfnBase):
    """End-to-end tests of the cfn collector against fake requests."""

    def test_collect_cfn(self):
        cfn_md = cfn.Collector(requests_impl=FakeRequests(self)).collect()
        self.assertThat(cfn_md, matchers.IsInstance(list))
        self.assertEqual('cfn', cfn_md[0][0])
        cfn_md = cfn_md[0][1]

        # Every key of the canned metadata must round-trip unchanged.
        for k in ('int1', 'strfoo', 'map_ab'):
            self.assertIn(k, cfn_md)
            self.assertEqual(cfn_md[k], META_DATA[k])

        self.assertEqual('', self.log.output)

    def test_collect_with_ca_cert(self):
        # A configured CA cert must be passed through as verify=...
        cfn.CONF.cfn.ca_certificate = "foo"
        collector = cfn.Collector(requests_impl=FakeRequests(self))
        collector.collect()
        self.assertTrue(collector._session.verify)

    def test_collect_cfn_fail(self):
        cfn_collect = cfn.Collector(requests_impl=FakeFailRequests)
        self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect)
        self.assertIn('Forbidden', self.log.output)

    def test_collect_cfn_no_path(self):
        cfg.CONF.cfn.path = None
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('No path configured', self.log.output)

    def test_collect_cfn_bad_path(self):
        # A path without a '.' separator is rejected up front.
        cfg.CONF.cfn.path = ['foo']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('Path not in format', self.log.output)

    def test_collect_cfn_no_metadata_url(self):
        cfg.CONF.cfn.heat_metadata_hint = None
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotConfigured, cfn_collect.collect)
        self.assertIn('No metadata_url configured', self.log.output)

    def test_collect_cfn_missing_sub_path(self):
        cfg.CONF.cfn.path = ['foo.Metadata.not_there']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        self.assertRaises(exc.CfnMetadataNotAvailable, cfn_collect.collect)
        self.assertIn('Sub-key not_there does not exist', self.log.output)

    def test_collect_cfn_sub_path(self):
        cfg.CONF.cfn.path = ['foo.Metadata.map_ab']
        cfn_collect = cfn.Collector(requests_impl=FakeRequests(self))
        content = cfn_collect.collect()
        self.assertThat(content, matchers.IsInstance(list))
        self.assertEqual('cfn', content[0][0])
        content = content[0][1]
        self.assertIn(u'b', content)
        self.assertEqual(u'banana', content[u'b'])

    def test_collect_cfn_metadata_url_overrides_hint(self):
        cfg.CONF.cfn.metadata_url = 'http://127.0.1.1:8000/v1/'
        cfn_collect = cfn.Collector(
            requests_impl=FakeRequests(self,
                                       expected_netloc='127.0.1.1:8000'))
        cfn_collect.collect()
|
||||
|
||||
|
||||
class TestCfnSoftwareConfig(TestCfnBase):
    """Tests of software-config deployment handling in the collector."""

    def test_collect_cfn_software_config(self):
        cfn_md = cfn.Collector(
            requests_impl=FakeRequestsSoftwareConfig(self)).collect()
        self.assertThat(cfn_md, matchers.IsInstance(list))
        self.assertEqual('cfn', cfn_md[0][0])
        cfn_config = cfn_md[0][1]
        self.assertThat(cfn_config, matchers.IsInstance(dict))
        self.assertEqual(set(['old-style', 'deployments']),
                         set(cfn_config.keys()))
        self.assertIn('deployments', cfn_config)
        self.assertThat(cfn_config['deployments'], matchers.IsInstance(list))
        self.assertEqual(4, len(cfn_config['deployments']))
        deployment = cfn_config['deployments'][0]
        self.assertIn('inputs', deployment)
        self.assertThat(deployment['inputs'], matchers.IsInstance(list))
        self.assertEqual(1, len(deployment['inputs']))
        # Deployments in recognised groups are promoted to entries.
        self.assertEqual('dep-name1', cfn_md[1][0])
        self.assertEqual('value1', cfn_md[1][1]['config1'])
        self.assertEqual('dep-name2', cfn_md[2][0])
        self.assertEqual('value2', cfn_md[2][1]['config2'])

    def test_collect_cfn_deployments_not_list(self):
        # A non-list 'deployments' value is passed through untouched.
        cfn_md = cfn.Collector(
            requests_impl=FakeRequestsConfigImposter(self)).collect()
        self.assertEqual(1, len(cfn_md))
        self.assertEqual('cfn', cfn_md[0][0])
        self.assertIn('not', cfn_md[0][1]['deployments'])
        self.assertEqual('a list', cfn_md[0][1]['deployments']['not'])
|
@ -1,581 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import copy
|
||||
import json
|
||||
import os
|
||||
import signal
|
||||
import sys
|
||||
import tempfile
|
||||
|
||||
import extras
|
||||
import fixtures
|
||||
from keystoneclient import discover as ks_discover
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import cache
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config.tests import test_cfn
|
||||
from os_collect_config.tests import test_ec2
|
||||
from os_collect_config.tests import test_heat
|
||||
from os_collect_config.tests import test_heat_local
|
||||
from os_collect_config.tests import test_local
|
||||
from os_collect_config.tests import test_request
|
||||
from os_collect_config.tests import test_zaqar
|
||||
|
||||
|
||||
def _setup_heat_local_metadata(test_case):
|
||||
test_case.useFixture(fixtures.NestedTempfile())
|
||||
local_md = tempfile.NamedTemporaryFile(delete=False)
|
||||
local_md.write(json.dumps(test_heat_local.META_DATA).encode('utf-8'))
|
||||
local_md.flush()
|
||||
return local_md.name
|
||||
|
||||
|
||||
def _setup_local_metadata(test_case):
|
||||
tmpdir = fixtures.TempDir()
|
||||
test_case.useFixture(tmpdir)
|
||||
local_data_path = tmpdir.path + '/local'
|
||||
with open(local_data_path, 'w') as local_data:
|
||||
json.dump(test_local.META_DATA, local_data)
|
||||
return tmpdir.path
|
||||
|
||||
|
||||
class TestCollect(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestCollect, self).setUp()
|
||||
self.useFixture(fixtures.FakeLogger())
|
||||
collect.setup_conf()
|
||||
self.addCleanup(cfg.CONF.reset)
|
||||
|
||||
def _call_main(self, fake_args):
|
||||
# make sure we don't run forever!
|
||||
if '--one-time' not in fake_args:
|
||||
fake_args.append('--one-time')
|
||||
collector_kwargs_map = {
|
||||
'ec2': {'requests_impl': test_ec2.FakeRequests},
|
||||
'cfn': {'requests_impl': test_cfn.FakeRequests(self)},
|
||||
'heat': {
|
||||
'keystoneclient': test_heat.FakeKeystoneClient(self),
|
||||
'heatclient': test_heat.FakeHeatClient(self)
|
||||
},
|
||||
'request': {'requests_impl': test_request.FakeRequests},
|
||||
'zaqar': {
|
||||
'keystoneclient': test_zaqar.FakeKeystoneClient(self),
|
||||
'zaqarclient': test_zaqar.FakeZaqarClient(self)
|
||||
},
|
||||
}
|
||||
return collect.__main__(args=fake_args,
|
||||
collector_kwargs_map=collector_kwargs_map)
|
||||
|
||||
def _fake_popen_call_main(self, occ_args):
|
||||
calls = []
|
||||
|
||||
def capture_popen(proc_args):
|
||||
calls.append(proc_args)
|
||||
return dict(returncode=0)
|
||||
self.useFixture(fixtures.FakePopen(capture_popen))
|
||||
self.assertEqual(0, self._call_main(occ_args))
|
||||
return calls
|
||||
|
||||
def test_main(self):
|
||||
expected_cmd = self.getUniqueString()
|
||||
cache_dir = self.useFixture(fixtures.TempDir())
|
||||
backup_cache_dir = self.useFixture(fixtures.TempDir())
|
||||
fake_metadata = _setup_heat_local_metadata(self)
|
||||
occ_args = [
|
||||
'os-collect-config',
|
||||
'--command',
|
||||
expected_cmd,
|
||||
'--cachedir',
|
||||
cache_dir.path,
|
||||
'--backup-cachedir',
|
||||
backup_cache_dir.path,
|
||||
'--config-file',
|
||||
'/dev/null',
|
||||
'--cfn-metadata-url',
|
||||
'http://127.0.0.1:8000/v1/',
|
||||
'--cfn-stack-name',
|
||||
'foo',
|
||||
'--cfn-path',
|
||||
'foo.Metadata',
|
||||
'--cfn-access-key-id',
|
||||
'0123456789ABCDEF',
|
||||
'--cfn-secret-access-key',
|
||||
'FEDCBA9876543210',
|
||||
'--heat_local-path',
|
||||
fake_metadata,
|
||||
'--heat-user-id',
|
||||
'FEDCBA9876543210',
|
||||
'--heat-password',
|
||||
'0123456789ABCDEF',
|
||||
'--heat-project-id',
|
||||
'9f6b09df-4d7f-4a33-8ec3-9924d8f46f10',
|
||||
'--heat-auth-url',
|
||||
'http://127.0.0.1:5000/v3',
|
||||
'--heat-stack-id',
|
||||
'a/c482680f-7238-403d-8f76-36acf0c8e0aa',
|
||||
'--heat-resource-name',
|
||||
'server'
|
||||
]
|
||||
calls = self._fake_popen_call_main(occ_args)
|
||||
# The Python 3 platform module makes a popen call, filter this out
|
||||
proc_calls = [call for call in calls if call['args'] == expected_cmd]
|
||||
self.assertEqual(len(proc_calls), 1)
|
||||
proc_args = proc_calls[0]
|
||||
for test_dir in (cache_dir, backup_cache_dir):
|
||||
list_path = os.path.join(test_dir.path, 'os_config_files.json')
|
||||
with open(list_path) as list_file:
|
||||
config_list = json.loads(list_file.read())
|
||||
self.assertThat(config_list, matchers.IsInstance(list))
|
||||
env_config_list = proc_args['env']['OS_CONFIG_FILES'].split(':')
|
||||
self.assertEqual(env_config_list, config_list)
|
||||
keys_found = set()
|
||||
for path in env_config_list:
|
||||
self.assertTrue(os.path.exists(path))
|
||||
with open(path) as cfg_file:
|
||||
contents = json.loads(cfg_file.read())
|
||||
keys_found.update(set(contents.keys()))
|
||||
# From test_ec2.FakeRequests
|
||||
self.assertIn("local-ipv4", keys_found)
|
||||
self.assertIn("reservation-id", keys_found)
|
||||
# From test_cfn.FakeRequests
|
||||
self.assertIn("int1", keys_found)
|
||||
self.assertIn("map_ab", keys_found)
|
||||
|
||||
def test_main_just_local(self):
|
||||
fake_md = _setup_heat_local_metadata(self)
|
||||
occ_args = [
|
||||
'os-collect-config',
|
||||
'--print',
|
||||
'--local-path', os.path.dirname(fake_md),
|
||||
'local',
|
||||
]
|
||||
self._call_main(occ_args)
|
||||
|
||||
def test_main_force_command(self):
|
||||
cache_dir = self.useFixture(fixtures.TempDir())
|
||||
backup_cache_dir = self.useFixture(fixtures.TempDir())
|
||||
fake_metadata = _setup_heat_local_metadata(self)
|
||||
occ_args = [
|
||||
'os-collect-config',
|
||||
'--command', 'foo',
|
||||
'--cachedir', cache_dir.path,
|
||||
'--backup-cachedir', backup_cache_dir.path,
|
||||
'--config-file', '/dev/null',
|
||||
'--heat_local-path', fake_metadata,
|
||||
'--force',
|
||||
]
|
||||
calls = self._fake_popen_call_main(occ_args)
|
||||
self.assertIn('OS_CONFIG_FILES', calls[0]['env'])
|
||||
cfg.CONF.reset()
|
||||
# First time caches data, run again, make sure we run command again
|
||||
calls = self._fake_popen_call_main(occ_args)
|
||||
self.assertIn('OS_CONFIG_FILES', calls[0]['env'])
|
||||
|
||||
def test_main_command_failed_no_caching(self):
|
||||
cache_dir = self.useFixture(fixtures.TempDir())
|
||||
backup_cache_dir = self.useFixture(fixtures.TempDir())
|
||||
fake_metadata = _setup_heat_local_metadata(self)
|
||||
occ_args = [
|
||||
'os-collect-config',
|
||||
'--command',
|
||||
'foo',
|
||||
'--cachedir',
|
||||
cache_dir.path,
|
||||
'--backup-cachedir',
|
||||
backup_cache_dir.path,
|
||||
'--config-file',
|
||||
'/dev/null',
|
||||
'--heat_local-path',
|
||||
fake_metadata,
|
||||
]
|
||||
calls = []
|
||||
|
||||
def capture_popen(proc_args):
|
||||
calls.append(proc_args)
|
||||
return dict(returncode=1)
|
||||
self.useFixture(fixtures.FakePopen(capture_popen))
|
||||
self.assertEqual(1, self._call_main(occ_args))
|
||||
for test_dir in (cache_dir, backup_cache_dir):
|
||||
cache_contents = os.listdir(test_dir.path)
|
||||
last_files = [n for n in cache_contents if n.endswith('last')]
|
||||
self.assertEqual([], last_files)
|
||||
|
||||
def test_main_no_command(self):
|
||||
fake_args = [
|
||||
'os-collect-config',
|
||||
'--config-file',
|
||||
'/dev/null',
|
||||
'--cfn-metadata-url',
|
||||
'http://127.0.0.1:8000/v1/',
|
||||
'--cfn-stack-name',
|
||||
'foo',
|
||||
'--cfn-path',
|
||||
'foo.Metadata',
|
||||
'--cfn-access-key-id',
|
||||
'0123456789ABCDEF',
|
||||
'--cfn-secret-access-key',
|
||||
'FEDCBA9876543210',
|
||||
]
|
||||
fake_metadata = _setup_heat_local_metadata(self)
|
||||
fake_args.append('--heat_local-path')
|
||||
fake_args.append(fake_metadata)
|
||||
output = self.useFixture(fixtures.StringStream('stdout'))
|
||||
self.useFixture(
|
||||
fixtures.MonkeyPatch('sys.stdout', output.stream))
|
||||
self._call_main(fake_args)
|
||||
out_struct = json.loads(output.getDetails()['stdout'].as_text())
|
||||
self.assertThat(out_struct, matchers.IsInstance(dict))
|
||||
self.assertIn('ec2', out_struct)
|
||||
self.assertIn('cfn', out_struct)
|
||||
|
||||
def test_main_print_cachedir(self):
|
||||
fake_cachedir = self.useFixture(fixtures.TempDir())
|
||||
fake_args = [
|
||||
'os-collect-config',
|
||||
'--cachedir', fake_cachedir.path,
|
||||
'--config-file', '/dev/null',
|
||||
'--print-cachedir',
|
||||
]
|
||||
|
||||
output = self.useFixture(fixtures.StringStream('stdout'))
|
||||
self.useFixture(
|
||||
fixtures.MonkeyPatch('sys.stdout', output.stream))
|
||||
self._call_main(fake_args)
|
||||
cache_dir = output.getDetails()['stdout'].as_text().strip()
|
||||
self.assertEqual(fake_cachedir.path, cache_dir)
|
||||
|
||||
def test_main_print_only(self):
|
||||
cache_dir = self.useFixture(fixtures.TempDir())
|
||||
backup_cache_dir = self.useFixture(fixtures.TempDir())
|
||||
fake_metadata = _setup_heat_local_metadata(self)
|
||||
args = [
|
||||
'os-collect-config',
|
||||
'--command', 'bar',
|
||||
'--cachedir', cache_dir.path,
|
||||
'--backup-cachedir', backup_cache_dir.path,
|
||||
'--config-file', '/dev/null',
|
||||
'--print',
|
||||
'--cfn-metadata-url',
|
||||
'http://127.0.0.1:8000/v1/',
|
||||
'--cfn-stack-name',
|
||||
'foo',
|
||||
'--cfn-path',
|
||||
'foo.Metadata',
|
||||
'--cfn-access-key-id',
|
||||
'0123456789ABCDEF',
|
||||
'--cfn-secret-access-key',
|
||||
'FEDCBA9876543210',
|
||||
'--heat_local-path', fake_metadata,
|
||||
]
|
||||
|
||||
def fake_popen(args):
|
||||
self.fail('Called command instead of printing')
|
||||
self.useFixture(fixtures.FakePopen(fake_popen))
|
||||
output = self.useFixture(fixtures.StringStream('stdout'))
|
||||
self.useFixture(
|
||||
fixtures.MonkeyPatch('sys.stdout', output.stream))
|
||||
self._call_main(args)
|
||||
out_struct = json.loads(output.getDetails()['stdout'].as_text())
|
||||
self.assertThat(out_struct, matchers.IsInstance(dict))
|
||||
self.assertIn('cfn', out_struct)
|
||||
self.assertIn('heat_local', out_struct)
|
||||
self.assertIn('ec2', out_struct)
|
||||
|
||||
def test_main_invalid_collector(self):
|
||||
fake_args = ['os-collect-config', 'invalid']
|
||||
self.assertRaises(exc.InvalidArguments, self._call_main, fake_args)
|
||||
|
||||
def test_main_sleep(self):
|
||||
class ExpectedException(Exception):
|
||||
pass
|
||||
|
||||
def fake_sleep(sleep_time):
|
||||
if sleep_time == 10:
|
||||
raise ExpectedException
|
||||
|
||||
self.useFixture(fixtures.MonkeyPatch('time.sleep', fake_sleep))
|
||||
try:
|
||||
collect.__main__(['os-collect-config', 'heat_local', '-i', '10',
|
||||
'-c', 'true'])
|
||||
except ExpectedException:
|
||||
pass
|
||||
|
||||
def test_main_no_sleep_with_no_command(self):
|
||||
def fake_sleep(sleep_time):
|
||||
raise Exception(cfg.CONF.command)
|
||||
|
||||
self.useFixture(fixtures.MonkeyPatch('time.sleep', fake_sleep))
|
||||
collect.__main__(['os-collect-config', 'heat_local', '--config-file',
|
||||
'/dev/null', '-i', '10'])
|
||||
|
||||
|
||||
class TestCollectAll(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestCollectAll, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
collect.setup_conf()
|
||||
self.cache_dir = self.useFixture(fixtures.TempDir())
|
||||
self.backup_cache_dir = self.useFixture(fixtures.TempDir())
|
||||
self.clean_conf = copy.copy(cfg.CONF)
|
||||
|
||||
def restore_copy():
|
||||
cfg.CONF = self.clean_conf
|
||||
self.addCleanup(restore_copy)
|
||||
|
||||
cfg.CONF.cachedir = self.cache_dir.path
|
||||
cfg.CONF.backup_cachedir = self.backup_cache_dir.path
|
||||
cfg.CONF.cfn.metadata_url = 'http://127.0.0.1:8000/v1/'
|
||||
cfg.CONF.cfn.stack_name = 'foo'
|
||||
cfg.CONF.cfn.path = ['foo.Metadata']
|
||||
cfg.CONF.cfn.access_key_id = '0123456789ABCDEF'
|
||||
cfg.CONF.cfn.secret_access_key = 'FEDCBA9876543210'
|
||||
cfg.CONF.heat_local.path = [_setup_heat_local_metadata(self)]
|
||||
cfg.CONF.heat.auth_url = 'http://127.0.0.1:5000/v3'
|
||||
cfg.CONF.heat.user_id = '0123456789ABCDEF'
|
||||
cfg.CONF.heat.password = 'FEDCBA9876543210'
|
||||
cfg.CONF.heat.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
|
||||
cfg.CONF.heat.stack_id = 'a/c482680f-7238-403d-8f76-36acf0c8e0aa'
|
||||
cfg.CONF.heat.resource_name = 'server'
|
||||
cfg.CONF.local.path = [_setup_local_metadata(self)]
|
||||
cfg.CONF.request.metadata_url = 'http://127.0.0.1:8000/my_metadata/'
|
||||
cfg.CONF.zaqar.auth_url = 'http://127.0.0.1:5000/v3'
|
||||
cfg.CONF.zaqar.user_id = '0123456789ABCDEF'
|
||||
cfg.CONF.zaqar.password = 'FEDCBA9876543210'
|
||||
cfg.CONF.zaqar.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
|
||||
cfg.CONF.zaqar.queue_id = '4f3f46d3-09f1-42a7-8c13-f91a5457192c'
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def _call_collect_all(self, mock_url_for, mock___init__, store,
|
||||
collector_kwargs_map=None, collectors=None):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.heat.auth_url
|
||||
if collector_kwargs_map is None:
|
||||
collector_kwargs_map = {
|
||||
'ec2': {'requests_impl': test_ec2.FakeRequests},
|
||||
'cfn': {'requests_impl': test_cfn.FakeRequests(self)},
|
||||
'heat': {
|
||||
'keystoneclient': test_heat.FakeKeystoneClient(self),
|
||||
'heatclient': test_heat.FakeHeatClient(self)
|
||||
},
|
||||
'request': {'requests_impl': test_request.FakeRequests},
|
||||
'zaqar': {
|
||||
'keystoneclient': test_zaqar.FakeKeystoneClient(self),
|
||||
'zaqarclient': test_zaqar.FakeZaqarClient(self)
|
||||
},
|
||||
}
|
||||
if collectors is None:
|
||||
collectors = cfg.CONF.collectors
|
||||
return collect.collect_all(
|
||||
collectors,
|
||||
store=store,
|
||||
collector_kwargs_map=collector_kwargs_map)
|
||||
|
||||
def _test_collect_all_store(self, collector_kwargs_map=None,
|
||||
expected_changed=None):
|
||||
(changed_keys, paths) = self._call_collect_all(
|
||||
store=True, collector_kwargs_map=collector_kwargs_map)
|
||||
if expected_changed is None:
|
||||
expected_changed = set(['heat_local', 'cfn', 'ec2',
|
||||
'heat', 'local', 'request', 'zaqar'])
|
||||
self.assertEqual(expected_changed, changed_keys)
|
||||
self.assertThat(paths, matchers.IsInstance(list))
|
||||
for path in paths:
|
||||
self.assertTrue(os.path.exists(path))
|
||||
self.assertTrue(os.path.exists('%s.orig' % path))
|
||||
|
||||
def test_collect_all_store(self):
|
||||
self._test_collect_all_store()
|
||||
|
||||
def test_collect_all_store_softwareconfig(self):
|
||||
soft_config_map = {
|
||||
'ec2': {'requests_impl': test_ec2.FakeRequests},
|
||||
'cfn': {
|
||||
'requests_impl': test_cfn.FakeRequestsSoftwareConfig(self)},
|
||||
'heat': {
|
||||
'keystoneclient': test_heat.FakeKeystoneClient(self),
|
||||
'heatclient': test_heat.FakeHeatClient(self)
|
||||
},
|
||||
'request': {'requests_impl': test_request.FakeRequests},
|
||||
'zaqar': {
|
||||
'keystoneclient': test_zaqar.FakeKeystoneClient(self),
|
||||
'zaqarclient': test_zaqar.FakeZaqarClient(self)
|
||||
},
|
||||
}
|
||||
expected_changed = set((
|
||||
'heat_local', 'ec2', 'cfn', 'heat', 'local', 'request',
|
||||
'dep-name1', 'dep-name2', 'dep-name3', 'zaqar'))
|
||||
self._test_collect_all_store(collector_kwargs_map=soft_config_map,
|
||||
expected_changed=expected_changed)
|
||||
|
||||
def test_collect_all_store_alt_order(self):
|
||||
# Ensure different than default
|
||||
new_list = list(reversed(cfg.CONF.collectors))
|
||||
(changed_keys, paths) = self._call_collect_all(
|
||||
store=True, collectors=new_list)
|
||||
self.assertEqual(set(cfg.CONF.collectors), changed_keys)
|
||||
self.assertThat(paths, matchers.IsInstance(list))
|
||||
expected_paths = [
|
||||
os.path.join(self.cache_dir.path, '%s.json' % collector)
|
||||
for collector in new_list]
|
||||
self.assertEqual(expected_paths, paths)
|
||||
|
||||
def test_collect_all_no_change(self):
|
||||
(changed_keys, paths) = self._call_collect_all(store=True)
|
||||
self.assertEqual(set(cfg.CONF.collectors), changed_keys)
|
||||
# Commit
|
||||
for changed in changed_keys:
|
||||
cache.commit(changed)
|
||||
(changed_keys, paths2) = self._call_collect_all(store=True)
|
||||
self.assertEqual(set(), changed_keys)
|
||||
self.assertEqual(paths, paths2)
|
||||
|
||||
def test_collect_all_no_change_softwareconfig(self):
|
||||
soft_config_map = {
|
||||
'ec2': {'requests_impl': test_ec2.FakeRequests},
|
||||
'cfn': {
|
||||
'requests_impl': test_cfn.FakeRequestsSoftwareConfig(self)},
|
||||
'heat': {
|
||||
'keystoneclient': test_heat.FakeKeystoneClient(self),
|
||||
'heatclient': test_heat.FakeHeatClient(self)
|
||||
},
|
||||
'request': {'requests_impl': test_request.FakeRequests},
|
||||
'zaqar': {
|
||||
'keystoneclient': test_zaqar.FakeKeystoneClient(self),
|
||||
'zaqarclient': test_zaqar.FakeZaqarClient(self)
|
||||
},
|
||||
}
|
||||
(changed_keys, paths) = self._call_collect_all(
|
||||
store=True, collector_kwargs_map=soft_config_map)
|
||||
expected_changed = set(cfg.CONF.collectors)
|
||||
expected_changed.add('dep-name1')
|
||||
expected_changed.add('dep-name2')
|
||||
expected_changed.add('dep-name3')
|
||||
self.assertEqual(expected_changed, changed_keys)
|
||||
# Commit
|
||||
for changed in changed_keys:
|
||||
cache.commit(changed)
|
||||
(changed_keys, paths2) = self._call_collect_all(
|
||||
store=True, collector_kwargs_map=soft_config_map)
|
||||
self.assertEqual(set(), changed_keys)
|
||||
self.assertEqual(paths, paths2)
|
||||
|
||||
def test_collect_all_nostore(self):
|
||||
(changed_keys, content) = self._call_collect_all(store=False)
|
||||
self.assertEqual(set(), changed_keys)
|
||||
self.assertThat(content, matchers.IsInstance(dict))
|
||||
for collector in cfg.CONF.collectors:
|
||||
self.assertIn(collector, content)
|
||||
self.assertThat(content[collector], matchers.IsInstance(dict))
|
||||
|
||||
def test_collect_all_ec2_unavailable(self):
|
||||
collector_kwargs_map = {
|
||||
'ec2': {'requests_impl': test_ec2.FakeFailRequests},
|
||||
'cfn': {'requests_impl': test_cfn.FakeRequests(self)}
|
||||
}
|
||||
(changed_keys, content) = self._call_collect_all(
|
||||
store=False, collector_kwargs_map=collector_kwargs_map)
|
||||
self.assertEqual(set(), changed_keys)
|
||||
self.assertThat(content, matchers.IsInstance(dict))
|
||||
self.assertNotIn('ec2', content)
|
||||
|
||||
def test_collect_all_cfn_unconfigured(self):
|
||||
collector_kwargs_map = {
|
||||
'cfn': {'requests_impl': test_cfn.FakeRequests(self)}
|
||||
}
|
||||
cfg.CONF.cfn.metadata_url = None
|
||||
(changed_keys, content) = self._call_collect_all(
|
||||
store=False, collector_kwargs_map=collector_kwargs_map,
|
||||
collectors=['heat_local', 'cfn'])
|
||||
self.assertIn('No metadata_url configured', self.log.output)
|
||||
self.assertNotIn('cfn', content)
|
||||
self.assertIn('heat_local', content)
|
||||
self.assertEqual(test_heat_local.META_DATA, content['heat_local'])
|
||||
|
||||
|
||||
class TestConf(testtools.TestCase):
|
||||
|
||||
def test_setup_conf(self):
|
||||
collect.setup_conf()
|
||||
self.assertEqual('/var/lib/os-collect-config', cfg.CONF.cachedir)
|
||||
self.assertTrue(extras.safe_hasattr(cfg.CONF, 'ec2'))
|
||||
self.assertTrue(extras.safe_hasattr(cfg.CONF, 'cfn'))
|
||||
|
||||
|
||||
class TestHup(testtools.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
super(TestHup, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
|
||||
def fake_closerange(low, high):
|
||||
self.assertEqual(3, low)
|
||||
self.assertEqual(255, high)
|
||||
|
||||
def fake_execv(path, args):
|
||||
self.assertEqual(sys.argv[0], path)
|
||||
self.assertEqual(sys.argv, args)
|
||||
|
||||
self.useFixture(fixtures.MonkeyPatch('os.execv', fake_execv))
|
||||
self.useFixture(fixtures.MonkeyPatch('os.closerange', fake_closerange))
|
||||
|
||||
def test_reexec_self_signal(self):
|
||||
collect.reexec_self(signal.SIGHUP, None)
|
||||
self.assertIn('Signal received', self.log.output)
|
||||
|
||||
def test_reexec_self(self):
|
||||
collect.reexec_self()
|
||||
self.assertNotIn('Signal received', self.log.output)
|
||||
|
||||
|
||||
class TestFileHash(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestFileHash, self).setUp()
|
||||
|
||||
# Deletes tempfiles during teardown
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
|
||||
self.file_1 = tempfile.mkstemp()[1]
|
||||
with open(self.file_1, "w") as fp:
|
||||
fp.write("test string")
|
||||
|
||||
self.file_2 = tempfile.mkstemp()[1]
|
||||
with open(self.file_2, "w") as fp:
|
||||
fp.write("test string2")
|
||||
|
||||
def test_getfilehash_nofile(self):
|
||||
h = collect.getfilehash([])
|
||||
self.assertEqual(h, "d41d8cd98f00b204e9800998ecf8427e")
|
||||
|
||||
def test_getfilehash_onefile(self):
|
||||
h = collect.getfilehash([self.file_1])
|
||||
self.assertEqual(h, "6f8db599de986fab7a21625b7916589c")
|
||||
|
||||
def test_getfilehash_twofiles(self):
|
||||
h = collect.getfilehash([self.file_1, self.file_2])
|
||||
self.assertEqual(h, "a8e1b2b743037b1ec17b5d4b49369872")
|
||||
|
||||
def test_getfilehash_filenotfound(self):
|
||||
self.assertEqual(
|
||||
collect.getfilehash([self.file_1, self.file_2]),
|
||||
collect.getfilehash([self.file_1, "/i/dont/exist", self.file_2])
|
||||
)
|
@ -1,116 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import uuid
|
||||
|
||||
import fixtures
|
||||
import requests
|
||||
import six.moves.urllib.parse as urlparse
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import ec2
|
||||
from os_collect_config import exc
|
||||
|
||||
|
||||
META_DATA = {'local-ipv4': '192.0.2.1',
|
||||
'reservation-id': str(uuid.uuid1()),
|
||||
'local-hostname': 'foo',
|
||||
'ami-launch-index': '0',
|
||||
'public-hostname': 'foo',
|
||||
'hostname': 'foo',
|
||||
'ami-id': str(uuid.uuid1()),
|
||||
'instance-action': 'none',
|
||||
'public-ipv4': '192.0.2.1',
|
||||
'instance-type': 'flavor.small',
|
||||
'placement/': 'availability-zone',
|
||||
'placement/availability-zone': 'foo-az',
|
||||
'mpi/': 'foo-keypair',
|
||||
'mpi/foo-keypair': '192.0.2.1 slots=1',
|
||||
'block-device-mapping/': "ami\nroot\nephemeral0",
|
||||
'block-device-mapping/ami': 'vda',
|
||||
'block-device-mapping/root': '/dev/vda',
|
||||
'block-device-mapping/ephemeral0': '/dev/vdb',
|
||||
'public-keys/': '0=foo-keypair',
|
||||
'public-keys/0': 'openssh-key',
|
||||
'public-keys/0/': 'openssh-key',
|
||||
'public-keys/0/openssh-key': 'ssh-rsa AAAAAAAAABBBBBBBBCCCCCCCC',
|
||||
'instance-id': str(uuid.uuid1())}
|
||||
|
||||
|
||||
class FakeResponse(dict):
|
||||
def __init__(self, text):
|
||||
self.text = text
|
||||
|
||||
def raise_for_status(self):
|
||||
pass
|
||||
|
||||
|
||||
class FakeRequests(object):
|
||||
exceptions = requests.exceptions
|
||||
|
||||
class Session(object):
|
||||
def get(self, url, timeout=None):
|
||||
url = urlparse.urlparse(url)
|
||||
|
||||
if url.path == '/latest/meta-data/':
|
||||
# Remove keys which have anything after /
|
||||
ks = [x for x in META_DATA.keys() if (
|
||||
'/' not in x or not len(x.split('/')[1]))]
|
||||
return FakeResponse("\n".join(ks))
|
||||
|
||||
path = url.path
|
||||
path = path.replace('/latest/meta-data/', '')
|
||||
return FakeResponse(META_DATA[path])
|
||||
|
||||
|
||||
class FakeFailRequests(object):
|
||||
exceptions = requests.exceptions
|
||||
|
||||
class Session(object):
|
||||
def get(self, url, timeout=None):
|
||||
raise requests.exceptions.HTTPError(403, 'Forbidden')
|
||||
|
||||
|
||||
class TestEc2(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestEc2, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
|
||||
def test_collect_ec2(self):
|
||||
collect.setup_conf()
|
||||
ec2_md = ec2.Collector(requests_impl=FakeRequests).collect()
|
||||
self.assertThat(ec2_md, matchers.IsInstance(list))
|
||||
self.assertEqual('ec2', ec2_md[0][0])
|
||||
ec2_md = ec2_md[0][1]
|
||||
|
||||
for k in ('public-ipv4', 'instance-id', 'hostname'):
|
||||
self.assertIn(k, ec2_md)
|
||||
self.assertEqual(ec2_md[k], META_DATA[k])
|
||||
|
||||
self.assertEqual(ec2_md['block-device-mapping']['ami'], 'vda')
|
||||
|
||||
# SSH keys are special cases
|
||||
self.assertEqual(
|
||||
{'0': {'openssh-key': 'ssh-rsa AAAAAAAAABBBBBBBBCCCCCCCC'}},
|
||||
ec2_md['public-keys'])
|
||||
self.assertEqual('', self.log.output)
|
||||
|
||||
def test_collect_ec2_fail(self):
|
||||
collect.setup_conf()
|
||||
collect_ec2 = ec2.Collector(requests_impl=FakeFailRequests)
|
||||
self.assertRaises(exc.Ec2MetadataNotAvailable, collect_ec2.collect)
|
||||
self.assertIn('Forbidden', self.log.output)
|
@ -1,221 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import fixtures
|
||||
from keystoneclient import discover as ks_discover
|
||||
from keystoneclient import exceptions as ks_exc
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import heat
|
||||
|
||||
|
||||
META_DATA = {u'int1': 1,
|
||||
u'strfoo': u'foo',
|
||||
u'map_ab': {
|
||||
u'a': 'apple',
|
||||
u'b': 'banana',
|
||||
}}
|
||||
|
||||
|
||||
SOFTWARE_CONFIG_DATA = {
|
||||
u'old-style': u'value',
|
||||
u'deployments': [
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'group': 'Heat::Ungrouped',
|
||||
u'name': 'dep-name1',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config1': 'value1'
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
SOFTWARE_CONFIG_IMPOSTER_DATA = {
|
||||
u'old-style': u'value',
|
||||
u'deployments': {
|
||||
u"not": u"a list"
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class FakeKeystoneClient(object):
|
||||
|
||||
def __init__(self, testcase, configs=None):
|
||||
self._test = testcase
|
||||
self.service_catalog = self
|
||||
self.auth_token = 'atoken'
|
||||
if configs is None:
|
||||
configs = cfg.CONF.heat
|
||||
self.configs = configs
|
||||
|
||||
def Client(self, auth_url, user_id, password, project_id):
|
||||
self._test.assertEqual(self.configs.auth_url, auth_url)
|
||||
self._test.assertEqual(self.configs.user_id, user_id)
|
||||
self._test.assertEqual(self.configs.password, password)
|
||||
self._test.assertEqual(self.configs.project_id, project_id)
|
||||
return self
|
||||
|
||||
def url_for(self, service_type, endpoint_type):
|
||||
self._test.assertEqual('orchestration', service_type)
|
||||
self._test.assertEqual('publicURL', endpoint_type)
|
||||
return 'http://127.0.0.1:8004/v1'
|
||||
|
||||
def get_auth_ref(self):
|
||||
return 'this is an auth_ref'
|
||||
|
||||
|
||||
class FakeFailKeystoneClient(FakeKeystoneClient):
|
||||
|
||||
def Client(self, auth_url, user_id, password, project_id):
|
||||
raise ks_exc.AuthorizationFailure('Forbidden')
|
||||
|
||||
|
||||
class FakeHeatClient(object):
|
||||
def __init__(self, testcase):
|
||||
self._test = testcase
|
||||
self.resources = self
|
||||
|
||||
def Client(self, version, endpoint, token):
|
||||
self._test.assertEqual('1', version)
|
||||
self._test.assertEqual('http://127.0.0.1:8004/v1', endpoint)
|
||||
self._test.assertEqual('atoken', token)
|
||||
return self
|
||||
|
||||
def metadata(self, stack_id, resource_name):
|
||||
self._test.assertEqual(cfg.CONF.heat.stack_id, stack_id)
|
||||
self._test.assertEqual(cfg.CONF.heat.resource_name, resource_name)
|
||||
return META_DATA
|
||||
|
||||
|
||||
class FakeHeatClientSoftwareConfig(FakeHeatClient):
|
||||
|
||||
def metadata(self, stack_id, resource_name):
|
||||
return SOFTWARE_CONFIG_DATA
|
||||
|
||||
|
||||
class TestHeatBase(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestHeatBase, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
collect.setup_conf()
|
||||
cfg.CONF.heat.auth_url = 'http://127.0.0.1:5000/v3'
|
||||
cfg.CONF.heat.user_id = '0123456789ABCDEF'
|
||||
cfg.CONF.heat.password = 'FEDCBA9876543210'
|
||||
cfg.CONF.heat.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
|
||||
cfg.CONF.heat.stack_id = 'a/c482680f-7238-403d-8f76-36acf0c8e0aa'
|
||||
cfg.CONF.heat.resource_name = 'server'
|
||||
|
||||
|
||||
class TestHeat(TestHeatBase):
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_collect_heat(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.heat.auth_url
|
||||
heat_md = heat.Collector(keystoneclient=FakeKeystoneClient(self),
|
||||
heatclient=FakeHeatClient(self)).collect()
|
||||
self.assertThat(heat_md, matchers.IsInstance(list))
|
||||
self.assertEqual('heat', heat_md[0][0])
|
||||
heat_md = heat_md[0][1]
|
||||
|
||||
for k in ('int1', 'strfoo', 'map_ab'):
|
||||
self.assertIn(k, heat_md)
|
||||
self.assertEqual(heat_md[k], META_DATA[k])
|
||||
|
||||
# FIXME(yanyanhu): Temporary hack to deal with possible log
|
||||
# level setting for urllib3.connectionpool.
|
||||
self.assertTrue(
|
||||
self.log.output == '' or
|
||||
self.log.output == 'Starting new HTTP connection (1): 127.0.0.1\n')
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_collect_heat_fail(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.heat.auth_url
|
||||
heat_collect = heat.Collector(
|
||||
keystoneclient=FakeFailKeystoneClient(self),
|
||||
heatclient=FakeHeatClient(self))
|
||||
self.assertRaises(exc.HeatMetadataNotAvailable, heat_collect.collect)
|
||||
self.assertIn('Forbidden', self.log.output)
|
||||
|
||||
def test_collect_heat_no_auth_url(self):
|
||||
cfg.CONF.heat.auth_url = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No auth_url configured', self.log.output)
|
||||
|
||||
def test_collect_heat_no_password(self):
|
||||
cfg.CONF.heat.password = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No password configured', self.log.output)
|
||||
|
||||
def test_collect_heat_no_project_id(self):
|
||||
cfg.CONF.heat.project_id = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No project_id configured', self.log.output)
|
||||
|
||||
def test_collect_heat_no_user_id(self):
|
||||
cfg.CONF.heat.user_id = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No user_id configured', self.log.output)
|
||||
|
||||
def test_collect_heat_no_stack_id(self):
|
||||
cfg.CONF.heat.stack_id = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No stack_id configured', self.log.output)
|
||||
|
||||
def test_collect_heat_no_resource_name(self):
|
||||
cfg.CONF.heat.resource_name = None
|
||||
heat_collect = heat.Collector()
|
||||
self.assertRaises(exc.HeatMetadataNotConfigured, heat_collect.collect)
|
||||
self.assertIn('No resource_name configured', self.log.output)
|
||||
|
||||
|
||||
class TestHeatSoftwareConfig(TestHeatBase):
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_collect_heat(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.heat.auth_url
|
||||
heat_md = heat.Collector(
|
||||
keystoneclient=FakeKeystoneClient(self),
|
||||
heatclient=FakeHeatClientSoftwareConfig(self)).collect()
|
||||
self.assertThat(heat_md, matchers.IsInstance(list))
|
||||
self.assertEqual(2, len(heat_md))
|
||||
self.assertEqual('heat', heat_md[0][0])
|
||||
self.assertEqual(
|
||||
SOFTWARE_CONFIG_DATA['deployments'], heat_md[0][1]['deployments'])
|
||||
self.assertEqual(
|
||||
('dep-name1', {'config1': 'value1'}), heat_md[1])
|
@ -1,97 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import os.path
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import heat_local
|
||||
|
||||
|
||||
META_DATA = {u'localstrA': u'A',
|
||||
u'localint9': 9,
|
||||
u'localmap_xy': {
|
||||
u'x': 42,
|
||||
u'y': 'foo',
|
||||
}}
|
||||
|
||||
|
||||
class TestHeatLocal(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestHeatLocal, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
collect.setup_conf()
|
||||
self.orig_cfg_CONF = cfg.CONF
|
||||
|
||||
def tearDown(self):
|
||||
cfg.CONF = self.orig_cfg_CONF
|
||||
cfg.CONF.reset()
|
||||
super(TestHeatLocal, self).tearDown()
|
||||
|
||||
def _call_collect(self, *temp_name):
|
||||
cfg.CONF.heat_local.path = list(temp_name)
|
||||
md = heat_local.Collector().collect()
|
||||
self.assertEqual('heat_local', md[0][0])
|
||||
return md[0][1]
|
||||
|
||||
def test_collect_heat_local(self):
|
||||
with tempfile.NamedTemporaryFile() as md:
|
||||
md.write(json.dumps(META_DATA).encode('utf-8'))
|
||||
md.flush()
|
||||
local_md = self._call_collect(md.name)
|
||||
|
||||
self.assertThat(local_md, matchers.IsInstance(dict))
|
||||
|
||||
for k in ('localstrA', 'localint9', 'localmap_xy'):
|
||||
self.assertIn(k, local_md)
|
||||
self.assertEqual(local_md[k], META_DATA[k])
|
||||
|
||||
self.assertEqual('', self.log.output)
|
||||
|
||||
def test_collect_heat_local_twice(self):
|
||||
with tempfile.NamedTemporaryFile() as md:
|
||||
md.write(json.dumps(META_DATA).encode('utf-8'))
|
||||
md.flush()
|
||||
local_md = self._call_collect(md.name, md.name)
|
||||
|
||||
self.assertThat(local_md, matchers.IsInstance(dict))
|
||||
|
||||
for k in ('localstrA', 'localint9', 'localmap_xy'):
|
||||
self.assertIn(k, local_md)
|
||||
self.assertEqual(local_md[k], META_DATA[k])
|
||||
|
||||
self.assertEqual('', self.log.output)
|
||||
|
||||
def test_collect_heat_local_with_invalid_metadata(self):
|
||||
with tempfile.NamedTemporaryFile() as md:
|
||||
md.write("{'invalid' => 'INVALID'}".encode('utf-8'))
|
||||
md.flush()
|
||||
self.assertRaises(exc.HeatLocalMetadataNotAvailable,
|
||||
self._call_collect, md.name)
|
||||
self.assertIn('Local metadata not found', self.log.output)
|
||||
|
||||
def test_collect_ec2_nofile(self):
|
||||
tdir = self.useFixture(fixtures.TempDir())
|
||||
test_path = os.path.join(tdir.path, 'does-not-exist.json')
|
||||
self.assertRaises(exc.HeatLocalMetadataNotAvailable,
|
||||
self._call_collect, test_path)
|
||||
self.assertIn('Local metadata not found', self.log.output)
|
@ -1,125 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from keystoneclient import discover as ks_discover
|
||||
from keystoneclient import exceptions as ks_exc
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import keystone
|
||||
from os_collect_config.tests import test_heat
|
||||
|
||||
|
||||
class FakeKeystoneClient(object):
|
||||
def __init__(self, *args, **kwargs):
|
||||
pass
|
||||
|
||||
def Client(self, *args, **kwargs):
|
||||
return self
|
||||
|
||||
@property
|
||||
def service_catalog(self):
|
||||
return {}
|
||||
|
||||
|
||||
class FakeFailGetAuthRef(FakeKeystoneClient):
|
||||
def get_auth_ref(self):
|
||||
raise ks_exc.AuthorizationFailed('Should not be called')
|
||||
|
||||
|
||||
class KeystoneTest(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(KeystoneTest, self).setUp()
|
||||
self.addCleanup(cfg.CONF.reset)
|
||||
collect.setup_conf()
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
self.cachedir = tempfile.mkdtemp()
|
||||
cfg.CONF.set_override('cache_dir', self.cachedir, group='keystone')
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_discover_fail(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.side_effect = ks_exc.DiscoveryFailure()
|
||||
ks = keystone.Keystone(
|
||||
'http://server.test:5000/v2.0', 'auser', 'apassword', 'aproject',
|
||||
test_heat.FakeKeystoneClient(self))
|
||||
self.assertEqual(ks.auth_url, 'http://server.test:5000/v3')
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_discover_v3_unsupported(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = None
|
||||
ks = keystone.Keystone(
|
||||
'http://server.test:5000/v2.0', 'auser', 'apassword', 'aproject',
|
||||
test_heat.FakeKeystoneClient(self))
|
||||
self.assertEqual(ks.auth_url, 'http://server.test:5000/v2.0')
|
||||
mock___init__.assert_called_with(auth_url='http://server.test:5000/')
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_cache_is_created(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = 'http://server.test:5000/'
|
||||
ks = keystone.Keystone(
|
||||
'http://server.test:5000/', 'auser', 'apassword', 'aproject',
|
||||
test_heat.FakeKeystoneClient(self))
|
||||
self.assertIsNotNone(ks.cache)
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def _make_ks(self, client, mock_url_for, mock___init__):
|
||||
class Configs(object):
|
||||
auth_url = 'http://server.test:5000/'
|
||||
user_id = 'auser'
|
||||
password = 'apassword'
|
||||
project_id = 'aproject'
|
||||
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = Configs.auth_url
|
||||
return keystone.Keystone(
|
||||
'http://server.test:5000/', 'auser', 'apassword', 'aproject',
|
||||
client(self, Configs))
|
||||
|
||||
def test_cache_auth_ref(self):
|
||||
ks = self._make_ks(test_heat.FakeKeystoneClient)
|
||||
auth_ref = ks.auth_ref
|
||||
# Client must fail now - we should make no client calls
|
||||
ks2 = self._make_ks(test_heat.FakeFailKeystoneClient)
|
||||
auth_ref2 = ks2.auth_ref
|
||||
self.assertEqual(auth_ref, auth_ref2)
|
||||
# And can we invalidate
|
||||
ks2.invalidate_auth_ref()
|
||||
# Can't use assertRaises because it is a @property
|
||||
try:
|
||||
ks2.auth_ref
|
||||
self.assertTrue(False, 'auth_ref should have failed.')
|
||||
except ks_exc.AuthorizationFailure:
|
||||
pass
|
||||
|
||||
def test_service_catalog(self):
|
||||
ks = self._make_ks(FakeKeystoneClient)
|
||||
service_catalog = ks.service_catalog
|
||||
ks2 = self._make_ks(FakeKeystoneClient)
|
||||
service_catalog2 = ks2.service_catalog
|
||||
self.assertEqual(service_catalog, service_catalog2)
|
||||
ks2.invalidate_auth_ref()
|
||||
service_catalog3 = ks.service_catalog
|
||||
self.assertEqual(service_catalog, service_catalog3)
|
@ -1,150 +0,0 @@
|
||||
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import json
|
||||
import locale
|
||||
import os
|
||||
import tempfile
|
||||
|
||||
import fixtures
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import local
|
||||
|
||||
|
||||
META_DATA = {u'localstrA': u'A',
|
||||
u'localint9': 9,
|
||||
u'localmap_xy': {
|
||||
u'x': 42,
|
||||
u'y': 'foo',
|
||||
}}
|
||||
META_DATA2 = {u'localstrA': u'Z',
|
||||
u'localint9': 9}
|
||||
|
||||
|
||||
class TestLocal(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestLocal, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
self.tdir = tempfile.mkdtemp()
|
||||
collect.setup_conf()
|
||||
self.addCleanup(cfg.CONF.reset)
|
||||
cfg.CONF.register_cli_opts(local.opts, group='local')
|
||||
cfg.CONF.set_override(name='path',
|
||||
override=[self.tdir],
|
||||
group='local')
|
||||
|
||||
def _call_collect(self):
|
||||
md = local.Collector().collect()
|
||||
return md
|
||||
|
||||
def _setup_test_json(self, data, md_base='test.json'):
|
||||
md_name = os.path.join(self.tdir, md_base)
|
||||
with open(md_name, 'w') as md:
|
||||
md.write(json.dumps(data))
|
||||
return md_name
|
||||
|
||||
def test_collect_local(self):
|
||||
self._setup_test_json(META_DATA)
|
||||
local_md = self._call_collect()
|
||||
|
||||
self.assertThat(local_md, matchers.IsInstance(list))
|
||||
self.assertEqual(1, len(local_md))
|
||||
self.assertThat(local_md[0], matchers.IsInstance(tuple))
|
||||
self.assertEqual(2, len(local_md[0]))
|
||||
self.assertEqual('test.json', local_md[0][0])
|
||||
|
||||
only_md = local_md[0][1]
|
||||
self.assertThat(only_md, matchers.IsInstance(dict))
|
||||
|
||||
for k in ('localstrA', 'localint9', 'localmap_xy'):
|
||||
self.assertIn(k, only_md)
|
||||
self.assertEqual(only_md[k], META_DATA[k])
|
||||
|
||||
self.assertEqual('', self.log.output)
|
||||
|
||||
def test_collect_local_world_writable(self):
|
||||
md_name = self._setup_test_json(META_DATA)
|
||||
os.chmod(md_name, 0o666)
|
||||
self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
|
||||
self.assertIn('%s is world writable. This is a security risk.' %
|
||||
md_name, self.log.output)
|
||||
|
||||
def test_collect_local_world_writable_dir(self):
|
||||
self._setup_test_json(META_DATA)
|
||||
os.chmod(self.tdir, 0o666)
|
||||
self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
|
||||
self.assertIn('%s is world writable. This is a security risk.' %
|
||||
self.tdir, self.log.output)
|
||||
|
||||
def test_collect_local_owner_not_uid(self):
|
||||
self._setup_test_json(META_DATA)
|
||||
real_getuid = os.getuid
|
||||
|
||||
def fake_getuid():
|
||||
return real_getuid() + 1
|
||||
self.useFixture(fixtures.MonkeyPatch('os.getuid', fake_getuid))
|
||||
self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
|
||||
self.assertIn('%s is owned by another user. This is a security risk.' %
|
||||
self.tdir, self.log.output)
|
||||
|
||||
def test_collect_local_orders_multiple(self):
|
||||
self._setup_test_json(META_DATA, '00test.json')
|
||||
self._setup_test_json(META_DATA2, '99test.json')
|
||||
|
||||
# Monkey Patch os.listdir so it _always_ returns the wrong sort
|
||||
unpatched_listdir = os.listdir
|
||||
|
||||
def wrong_sort_listdir(path):
|
||||
ret = unpatched_listdir(path)
|
||||
save_locale = locale.getdefaultlocale()
|
||||
locale.setlocale(locale.LC_ALL, 'C')
|
||||
bad_sort = sorted(ret, reverse=True)
|
||||
locale.setlocale(locale.LC_ALL, save_locale)
|
||||
return bad_sort
|
||||
self.useFixture(fixtures.MonkeyPatch('os.listdir', wrong_sort_listdir))
|
||||
local_md = self._call_collect()
|
||||
|
||||
self.assertThat(local_md, matchers.IsInstance(list))
|
||||
self.assertEqual(2, len(local_md))
|
||||
self.assertThat(local_md[0], matchers.IsInstance(tuple))
|
||||
|
||||
self.assertEqual('00test.json', local_md[0][0])
|
||||
md1 = local_md[0][1]
|
||||
self.assertEqual(META_DATA, md1)
|
||||
|
||||
self.assertEqual('99test.json', local_md[1][0])
|
||||
md2 = local_md[1][1]
|
||||
self.assertEqual(META_DATA2, md2)
|
||||
|
||||
def test_collect_invalid_json_fail(self):
|
||||
self._setup_test_json(META_DATA)
|
||||
with open(os.path.join(self.tdir, 'bad.json'), 'w') as badjson:
|
||||
badjson.write('{')
|
||||
self.assertRaises(exc.LocalMetadataNotAvailable, self._call_collect)
|
||||
self.assertIn('is not valid JSON', self.log.output)
|
||||
|
||||
def test_collect_local_path_nonexist(self):
|
||||
cfg.CONF.set_override(name='path',
|
||||
override=['/this/doesnt/exist'],
|
||||
group='local')
|
||||
local_md = self._call_collect()
|
||||
self.assertThat(local_md, matchers.IsInstance(list))
|
||||
self.assertEqual(0, len(local_md))
|
@ -1,109 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import testtools
|
||||
|
||||
from os_collect_config import merger
|
||||
|
||||
|
||||
META_DATA = {u'int1': 1,
|
||||
u'strfoo': u'foo',
|
||||
u'map_ab': {
|
||||
u'a': 'apple',
|
||||
u'b': 'banana',
|
||||
}}
|
||||
|
||||
|
||||
SOFTWARE_CONFIG_DATA = {
|
||||
u'old-style': u'value',
|
||||
u'deployments': [
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'group': 'Heat::Ungrouped',
|
||||
u'name': 'dep-name1',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config1': 'value1'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'group': 'os-apply-config',
|
||||
u'name': 'dep-name2',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config2': 'value2'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'name': 'dep-name3',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config3': 'value3'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [],
|
||||
u'group': 'ignore_me',
|
||||
u'name': 'ignore_me_name',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': 'ignore_me_config'
|
||||
},
|
||||
{
|
||||
u'inputs': [], # to test missing name
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class TestMerger(testtools.TestCase):
|
||||
|
||||
def test_merged_list_from_content(self):
|
||||
req_md = merger.merged_list_from_content(
|
||||
SOFTWARE_CONFIG_DATA,
|
||||
['deployments'],
|
||||
'collectme')
|
||||
self.assertEqual(4, len(req_md))
|
||||
self.assertEqual(
|
||||
SOFTWARE_CONFIG_DATA['deployments'], req_md[0][1]['deployments'])
|
||||
self.assertEqual(
|
||||
('dep-name1', {'config1': 'value1'}), req_md[1])
|
||||
self.assertEqual(
|
||||
('dep-name2', {'config2': 'value2'}), req_md[2])
|
||||
self.assertEqual(
|
||||
('dep-name3', {'config3': 'value3'}), req_md[3])
|
@ -1,239 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import calendar
|
||||
import json
|
||||
import time
|
||||
|
||||
import fixtures
|
||||
from oslo_config import cfg
|
||||
import requests
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import request
|
||||
|
||||
|
||||
META_DATA = {u'int1': 1,
|
||||
u'strfoo': u'foo',
|
||||
u'map_ab': {
|
||||
u'a': 'apple',
|
||||
u'b': 'banana',
|
||||
}}
|
||||
|
||||
|
||||
SOFTWARE_CONFIG_DATA = {
|
||||
u'old-style': u'value',
|
||||
u'deployments': [
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'group': 'Heat::Ungrouped',
|
||||
u'name': 'dep-name1',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config1': 'value1'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'group': 'os-apply-config',
|
||||
u'name': 'dep-name2',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config2': 'value2'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [
|
||||
{
|
||||
u'type': u'String',
|
||||
u'name': u'input1',
|
||||
u'value': u'value1'
|
||||
}
|
||||
],
|
||||
u'name': 'dep-name3',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': {
|
||||
u'config3': 'value3'
|
||||
}
|
||||
},
|
||||
{
|
||||
u'inputs': [],
|
||||
u'group': 'ignore_me',
|
||||
u'name': 'ignore_me_name',
|
||||
u'outputs': None,
|
||||
u'options': None,
|
||||
u'config': 'ignore_me_config'
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
class FakeResponse(dict):
|
||||
def __init__(self, text, headers=None):
|
||||
self.text = text
|
||||
self.headers = headers
|
||||
|
||||
def raise_for_status(self):
|
||||
pass
|
||||
|
||||
|
||||
class FakeRequests(object):
|
||||
exceptions = requests.exceptions
|
||||
|
||||
class Session(object):
|
||||
def get(self, url, timeout=None):
|
||||
return FakeResponse(json.dumps(META_DATA))
|
||||
|
||||
def head(self, url, timeout=None):
|
||||
return FakeResponse('', headers={
|
||||
'last-modified': time.strftime(
|
||||
"%a, %d %b %Y %H:%M:%S %Z", time.gmtime())})
|
||||
|
||||
|
||||
class FakeFailRequests(object):
|
||||
exceptions = requests.exceptions
|
||||
|
||||
class Session(object):
|
||||
def get(self, url, timeout=None):
|
||||
raise requests.exceptions.HTTPError(403, 'Forbidden')
|
||||
|
||||
def head(self, url, timeout=None):
|
||||
raise requests.exceptions.HTTPError(403, 'Forbidden')
|
||||
|
||||
|
||||
class FakeRequestsSoftwareConfig(object):
|
||||
|
||||
class Session(object):
|
||||
def get(self, url, timeout=None):
|
||||
return FakeResponse(json.dumps(SOFTWARE_CONFIG_DATA))
|
||||
|
||||
def head(self, url, timeout=None):
|
||||
return FakeResponse('', headers={
|
||||
'last-modified': time.strftime(
|
||||
"%a, %d %b %Y %H:%M:%S %Z", time.gmtime())})
|
||||
|
||||
|
||||
class TestRequestBase(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestRequestBase, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
collect.setup_conf()
|
||||
cfg.CONF.request.metadata_url = 'http://127.0.0.1:8000/my_metadata'
|
||||
|
||||
|
||||
class TestRequest(TestRequestBase):
|
||||
|
||||
def test_collect_request(self):
|
||||
req_collect = request.Collector(requests_impl=FakeRequests)
|
||||
self.assertIsNone(req_collect.last_modified)
|
||||
req_md = req_collect.collect()
|
||||
self.assertIsNotNone(req_collect.last_modified)
|
||||
self.assertThat(req_md, matchers.IsInstance(list))
|
||||
self.assertEqual('request', req_md[0][0])
|
||||
req_md = req_md[0][1]
|
||||
|
||||
for k in ('int1', 'strfoo', 'map_ab'):
|
||||
self.assertIn(k, req_md)
|
||||
self.assertEqual(req_md[k], META_DATA[k])
|
||||
|
||||
self.assertEqual('', self.log.output)
|
||||
|
||||
def test_collect_request_fail(self):
|
||||
req_collect = request.Collector(requests_impl=FakeFailRequests)
|
||||
self.assertRaises(exc.RequestMetadataNotAvailable, req_collect.collect)
|
||||
self.assertIn('Forbidden', self.log.output)
|
||||
|
||||
def test_collect_request_no_metadata_url(self):
|
||||
cfg.CONF.request.metadata_url = None
|
||||
req_collect = request.Collector(requests_impl=FakeRequests)
|
||||
self.assertRaises(exc.RequestMetadataNotConfigured,
|
||||
req_collect.collect)
|
||||
self.assertIn('No metadata_url configured', self.log.output)
|
||||
|
||||
def test_check_fetch_content(self):
|
||||
req_collect = request.Collector()
|
||||
|
||||
now_secs = calendar.timegm(time.gmtime())
|
||||
now_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
|
||||
time.gmtime(now_secs))
|
||||
|
||||
future_secs = calendar.timegm(time.gmtime()) + 10
|
||||
future_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
|
||||
time.gmtime(future_secs))
|
||||
|
||||
past_secs = calendar.timegm(time.gmtime()) - 10
|
||||
past_str = time.strftime("%a, %d %b %Y %H:%M:%S %Z",
|
||||
time.gmtime(past_secs))
|
||||
|
||||
self.assertIsNone(req_collect.last_modified)
|
||||
|
||||
# first run always collects
|
||||
self.assertEqual(
|
||||
now_secs,
|
||||
req_collect.check_fetch_content({'last-modified': now_str}))
|
||||
|
||||
# second run unmodified, does not collect
|
||||
req_collect.last_modified = now_secs
|
||||
self.assertRaises(exc.RequestMetadataNotAvailable,
|
||||
req_collect.check_fetch_content,
|
||||
{'last-modified': now_str})
|
||||
|
||||
# run with later date, collects
|
||||
self.assertEqual(
|
||||
future_secs,
|
||||
req_collect.check_fetch_content({'last-modified': future_str}))
|
||||
|
||||
# run with earlier date, does not collect
|
||||
self.assertRaises(exc.RequestMetadataNotAvailable,
|
||||
req_collect.check_fetch_content,
|
||||
{'last-modified': past_str})
|
||||
|
||||
# run no last-modified header, collects
|
||||
self.assertIsNone(req_collect.check_fetch_content({}))
|
||||
|
||||
|
||||
class TestRequestSoftwareConfig(TestRequestBase):
|
||||
|
||||
def test_collect_request(self):
|
||||
req_collect = request.Collector(
|
||||
requests_impl=FakeRequestsSoftwareConfig)
|
||||
req_md = req_collect.collect()
|
||||
self.assertEqual(4, len(req_md))
|
||||
self.assertEqual(
|
||||
SOFTWARE_CONFIG_DATA['deployments'], req_md[0][1]['deployments'])
|
||||
self.assertEqual(
|
||||
('dep-name1', {'config1': 'value1'}), req_md[1])
|
||||
self.assertEqual(
|
||||
('dep-name2', {'config2': 'value2'}), req_md[2])
|
||||
self.assertEqual(
|
||||
('dep-name3', {'config3': 'value3'}), req_md[3])
|
@ -1,133 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import fixtures
|
||||
from keystoneclient import discover as ks_discover
|
||||
import mock
|
||||
from oslo_config import cfg
|
||||
import testtools
|
||||
from testtools import matchers
|
||||
from zaqarclient.queues.v1 import message
|
||||
|
||||
from os_collect_config import collect
|
||||
from os_collect_config import exc
|
||||
from os_collect_config.tests import test_heat
|
||||
from os_collect_config import zaqar
|
||||
|
||||
|
||||
class FakeKeystoneClient(test_heat.FakeKeystoneClient):
|
||||
|
||||
def url_for(self, service_type, endpoint_type):
|
||||
self._test.assertEqual('messaging', service_type)
|
||||
self._test.assertEqual('publicURL', endpoint_type)
|
||||
return 'http://127.0.0.1:8888/'
|
||||
|
||||
|
||||
class FakeZaqarClient(object):
|
||||
|
||||
def __init__(self, testcase):
|
||||
self._test = testcase
|
||||
|
||||
def Client(self, endpoint, conf, version):
|
||||
self._test.assertEqual(1.1, version)
|
||||
self._test.assertEqual('http://127.0.0.1:8888/', endpoint)
|
||||
return self
|
||||
|
||||
def queue(self, queue_id):
|
||||
self._test.assertEqual(
|
||||
'4f3f46d3-09f1-42a7-8c13-f91a5457192c', queue_id)
|
||||
return FakeQueue()
|
||||
|
||||
|
||||
class FakeQueue(object):
|
||||
|
||||
def pop(self):
|
||||
return iter([message.Message(
|
||||
queue=self, ttl=10, age=10, body=test_heat.META_DATA, href='')])
|
||||
|
||||
|
||||
class TestZaqar(testtools.TestCase):
|
||||
def setUp(self):
|
||||
super(TestZaqar, self).setUp()
|
||||
self.log = self.useFixture(fixtures.FakeLogger())
|
||||
self.useFixture(fixtures.NestedTempfile())
|
||||
collect.setup_conf()
|
||||
cfg.CONF.zaqar.auth_url = 'http://127.0.0.1:5000/v3'
|
||||
cfg.CONF.zaqar.user_id = '0123456789ABCDEF'
|
||||
cfg.CONF.zaqar.password = 'FEDCBA9876543210'
|
||||
cfg.CONF.zaqar.project_id = '9f6b09df-4d7f-4a33-8ec3-9924d8f46f10'
|
||||
cfg.CONF.zaqar.queue_id = '4f3f46d3-09f1-42a7-8c13-f91a5457192c'
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_collect_zaqar(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.zaqar.auth_url
|
||||
zaqar_md = zaqar.Collector(
|
||||
keystoneclient=FakeKeystoneClient(self, cfg.CONF.zaqar),
|
||||
zaqarclient=FakeZaqarClient(self)).collect()
|
||||
self.assertThat(zaqar_md, matchers.IsInstance(list))
|
||||
self.assertEqual('zaqar', zaqar_md[0][0])
|
||||
zaqar_md = zaqar_md[0][1]
|
||||
|
||||
for k in ('int1', 'strfoo', 'map_ab'):
|
||||
self.assertIn(k, zaqar_md)
|
||||
self.assertEqual(zaqar_md[k], test_heat.META_DATA[k])
|
||||
|
||||
@mock.patch.object(ks_discover.Discover, '__init__')
|
||||
@mock.patch.object(ks_discover.Discover, 'url_for')
|
||||
def test_collect_zaqar_fail(self, mock_url_for, mock___init__):
|
||||
mock___init__.return_value = None
|
||||
mock_url_for.return_value = cfg.CONF.zaqar.auth_url
|
||||
zaqar_collect = zaqar.Collector(
|
||||
keystoneclient=test_heat.FakeFailKeystoneClient(
|
||||
self, cfg.CONF.zaqar),
|
||||
zaqarclient=FakeZaqarClient(self))
|
||||
self.assertRaises(exc.ZaqarMetadataNotAvailable, zaqar_collect.collect)
|
||||
self.assertIn('Forbidden', self.log.output)
|
||||
|
||||
def test_collect_zaqar_no_auth_url(self):
|
||||
cfg.CONF.zaqar.auth_url = None
|
||||
zaqar_collect = zaqar.Collector()
|
||||
self.assertRaises(
|
||||
exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
|
||||
self.assertIn('No auth_url configured', self.log.output)
|
||||
|
||||
def test_collect_zaqar_no_password(self):
|
||||
cfg.CONF.zaqar.password = None
|
||||
zaqar_collect = zaqar.Collector()
|
||||
self.assertRaises(
|
||||
exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
|
||||
self.assertIn('No password configured', self.log.output)
|
||||
|
||||
def test_collect_zaqar_no_project_id(self):
|
||||
cfg.CONF.zaqar.project_id = None
|
||||
zaqar_collect = zaqar.Collector()
|
||||
self.assertRaises(
|
||||
exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
|
||||
self.assertIn('No project_id configured', self.log.output)
|
||||
|
||||
def test_collect_zaqar_no_user_id(self):
|
||||
cfg.CONF.zaqar.user_id = None
|
||||
zaqar_collect = zaqar.Collector()
|
||||
self.assertRaises(
|
||||
exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
|
||||
self.assertIn('No user_id configured', self.log.output)
|
||||
|
||||
def test_collect_zaqar_no_queue_id(self):
|
||||
cfg.CONF.zaqar.queue_id = None
|
||||
zaqar_collect = zaqar.Collector()
|
||||
self.assertRaises(
|
||||
exc.ZaqarMetadataNotConfigured, zaqar_collect.collect)
|
||||
self.assertIn('No queue_id configured', self.log.output)
|
@ -1,18 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
||||
# not use this file except in compliance with the License. You may obtain
|
||||
# a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
||||
# License for the specific language governing permissions and limitations
|
||||
# under the License.
|
||||
|
||||
|
||||
import pbr.version
|
||||
|
||||
version_info = pbr.version.VersionInfo('os-collect-config')
|
@ -1,94 +0,0 @@
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from keystoneclient.v3 import client as keystoneclient
|
||||
from oslo_config import cfg
|
||||
from oslo_log import log
|
||||
import six
|
||||
from zaqarclient.queues.v1 import client as zaqarclient
|
||||
|
||||
from os_collect_config import exc
|
||||
from os_collect_config import keystone
|
||||
|
||||
CONF = cfg.CONF
|
||||
logger = log.getLogger(__name__)
|
||||
|
||||
opts = [
|
||||
cfg.StrOpt('user-id',
|
||||
help='User ID for API authentication'),
|
||||
cfg.StrOpt('password',
|
||||
help='Password for API authentication'),
|
||||
cfg.StrOpt('project-id',
|
||||
help='ID of project for API authentication'),
|
||||
cfg.StrOpt('auth-url',
|
||||
help='URL for API authentication'),
|
||||
cfg.StrOpt('queue-id',
|
||||
help='ID of the queue to be checked'),
|
||||
]
|
||||
name = 'zaqar'
|
||||
|
||||
|
||||
class Collector(object):
|
||||
def __init__(self,
|
||||
keystoneclient=keystoneclient,
|
||||
zaqarclient=zaqarclient):
|
||||
self.keystoneclient = keystoneclient
|
||||
self.zaqarclient = zaqarclient
|
||||
|
||||
def collect(self):
|
||||
if CONF.zaqar.auth_url is None:
|
||||
logger.warn('No auth_url configured.')
|
||||
raise exc.ZaqarMetadataNotConfigured()
|
||||
if CONF.zaqar.password is None:
|
||||
logger.warn('No password configured.')
|
||||
raise exc.ZaqarMetadataNotConfigured()
|
||||
if CONF.zaqar.project_id is None:
|
||||
logger.warn('No project_id configured.')
|
||||
raise exc.ZaqarMetadataNotConfigured()
|
||||
if CONF.zaqar.user_id is None:
|
||||
logger.warn('No user_id configured.')
|
||||
raise exc.ZaqarMetadataNotConfigured()
|
||||
if CONF.zaqar.queue_id is None:
|
||||
logger.warn('No queue_id configured.')
|
||||
raise exc.ZaqarMetadataNotConfigured()
|
||||
|
||||
try:
|
||||
ks = keystone.Keystone(
|
||||
auth_url=CONF.zaqar.auth_url,
|
||||
user_id=CONF.zaqar.user_id,
|
||||
password=CONF.zaqar.password,
|
||||
project_id=CONF.zaqar.project_id,
|
||||
keystoneclient=self.keystoneclient).client
|
||||
endpoint = ks.service_catalog.url_for(
|
||||
service_type='messaging', endpoint_type='publicURL')
|
||||
logger.debug('Fetching metadata from %s' % endpoint)
|
||||
conf = {
|
||||
'auth_opts': {
|
||||
'backend': 'keystone',
|
||||
'options': {
|
||||
'os_auth_token': ks.auth_token,
|
||||
'os_project_id': CONF.zaqar.project_id
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
zaqar = self.zaqarclient.Client(endpoint, conf=conf, version=1.1)
|
||||
|
||||
queue = zaqar.queue(CONF.zaqar.queue_id)
|
||||
r = six.next(queue.pop())
|
||||
|
||||
return [('zaqar', r.body)]
|
||||
except Exception as e:
|
||||
logger.warn(str(e))
|
||||
raise exc.ZaqarMetadataNotAvailable()
|
@ -1,17 +0,0 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
pbr>=1.6 # Apache-2.0
|
||||
|
||||
anyjson>=0.3.3 # BSD
|
||||
eventlet!=0.18.3,>=0.18.2 # MIT
|
||||
python-keystoneclient!=1.8.0,!=2.1.0,>=1.7.0 # Apache-2.0
|
||||
python-heatclient>=1.1.0 # Apache-2.0
|
||||
python-zaqarclient>=1.0.0 # Apache-2.0
|
||||
requests>=2.10.0 # Apache-2.0
|
||||
iso8601>=0.1.11 # MIT
|
||||
lxml>=2.3 # BSD
|
||||
oslo.config>=3.12.0 # Apache-2.0
|
||||
oslo.log>=1.14.0 # Apache-2.0
|
||||
six>=1.9.0 # MIT
|
||||
dogpile.cache>=0.6.1 # BSD
|
34
setup.cfg
34
setup.cfg
@ -1,34 +0,0 @@
|
||||
[metadata]
|
||||
name = os-collect-config
|
||||
author = OpenStack
|
||||
author-email = openstack-dev@lists.openstack.org
|
||||
summary = Collect and cache metadata, run hooks on changes.
|
||||
description-file =
|
||||
README.rst
|
||||
home-page = http://git.openstack.org/cgit/openstack/os-collect-config
|
||||
classifier =
|
||||
Development Status :: 4 - Beta
|
||||
Environment :: Console
|
||||
Environment :: OpenStack
|
||||
Intended Audience :: Developers
|
||||
Intended Audience :: Information Technology
|
||||
License :: OSI Approved :: Apache Software License
|
||||
Operating System :: OS Independent
|
||||
Programming Language :: Python
|
||||
|
||||
[files]
|
||||
packages =
|
||||
os_collect_config
|
||||
|
||||
[global]
|
||||
setup-hooks =
|
||||
pbr.hooks.setup_hook
|
||||
|
||||
[entry_points]
|
||||
console_scripts =
|
||||
os-collect-config = os_collect_config.collect:__main__
|
||||
|
||||
[egg_info]
|
||||
tag_build =
|
||||
tag_date = 0
|
||||
tag_svn_revision = 0
|
29
setup.py
29
setup.py
@ -1,29 +0,0 @@
|
||||
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
# implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||
import setuptools
|
||||
|
||||
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||
# setuptools if some other modules registered functions in `atexit`.
|
||||
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||
try:
|
||||
import multiprocessing # noqa
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
setuptools.setup(
|
||||
setup_requires=['pbr>=1.8'],
|
||||
pbr=True)
|
@ -1,14 +0,0 @@
|
||||
# The order of packages is significant, because pip processes them in the order
|
||||
# of appearance. Changing the order has an impact on the overall integration
|
||||
# process, which may cause wedges in the gate later.
|
||||
hacking<0.10,>=0.9.2
|
||||
|
||||
coverage>=3.6 # Apache-2.0
|
||||
discover # BSD
|
||||
fixtures>=3.0.0 # Apache-2.0/BSD
|
||||
mock>=2.0 # BSD
|
||||
python-subunit>=0.0.18 # Apache-2.0/BSD
|
||||
sphinx!=1.3b1,<1.3,>=1.2.1 # BSD
|
||||
testrepository>=0.0.18 # Apache-2.0/BSD
|
||||
testscenarios>=0.4 # Apache-2.0/BSD
|
||||
testtools>=1.4.0 # MIT
|
33
tox.ini
33
tox.ini
@ -1,33 +0,0 @@
|
||||
[tox]
|
||||
minversion = 1.6
|
||||
skipsdist = True
|
||||
envlist = py27,pep8
|
||||
|
||||
[testenv]
|
||||
usedevelop = True
|
||||
install_command = pip install -U {opts} {packages}
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
deps = -r{toxinidir}/requirements.txt
|
||||
-r{toxinidir}/test-requirements.txt
|
||||
commands =
|
||||
python setup.py testr --slowest --testr-args='{posargs}'
|
||||
|
||||
[tox:jenkins]
|
||||
sitepackages = True
|
||||
|
||||
[testenv:pep8]
|
||||
commands = flake8
|
||||
|
||||
[testenv:cover]
|
||||
setenv = VIRTUAL_ENV={envdir}
|
||||
commands =
|
||||
python setup.py test --coverage --coverage-package-name=os_collect_config
|
||||
|
||||
[testenv:venv]
|
||||
commands = {posargs}
|
||||
|
||||
[flake8]
|
||||
# H405 multi line docstring summary not separated with an empty line
|
||||
ignore = H803,H405
|
||||
exclude = .venv,.tox,dist,doc,*.egg,./os_collect_config/openstack/*
|
||||
show-source = true
|
Loading…
Reference in New Issue
Block a user