Delete unused code

Change-Id: Iad36e5874c5ca15f37064a4741b363b3864117bb
Signed-off-by: Zhijiang Hu <hu.zhijiang@zte.com.cn>
This commit is contained in:
Zhijiang Hu 2016-09-01 04:24:05 -04:00
parent 59f51fc1d8
commit 177916854a
52 changed files with 7 additions and 6401 deletions

View File

@ -1,20 +1,19 @@
include run_tests.sh ChangeLog
include README.rst builddeb.sh
include README.rst
include MANIFEST.in pylintrc
include AUTHORS
include run_tests.py
include HACKING.rst
include LICENSE
include ChangeLog
include babel.cfg tox.ini
include openstack-common.conf
include glance/openstack/common/README
include glance/db/sqlalchemy/migrate_repo/README
include glance/db/sqlalchemy/migrate_repo/migrate.cfg
include glance/db/sqlalchemy/migrate_repo/versions/*.sql
include daisy/openstack/common/README
include daisy/db/sqlalchemy/migrate_repo/README
include daisy/db/sqlalchemy/migrate_repo/migrate.cfg
include daisy/db/sqlalchemy/migrate_repo/versions/*.sql
graft doc
graft etc
graft glance/locale
graft glance/tests
graft daisy/locale
graft daisy/tests
graft tools
global-exclude *.pyc

View File

@ -1,30 +0,0 @@
Metadata-Version: 1.1
Name: glance
Version: 2015.1.0
Summary: OpenStack Image Service
Home-page: http://www.openstack.org/
Author: OpenStack
Author-email: openstack-dev@lists.openstack.org
License: UNKNOWN
Description: ======
Glance
======
Glance is a project that defines services for discovering, registering,
retrieving and storing virtual machine images.
Use the following resources to learn more:
* `Official Glance documentation <http://docs.openstack.org/developer/glance/>`_
* `Official Client documentation <http://docs.openstack.org/developer/python-glanceclient/>`_
Platform: UNKNOWN
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Information Technology
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7

View File

@ -1,30 +0,0 @@
Metadata-Version: 1.1
Name: daisy
Version: 2015.1.0
Summary: OpenStack Image Service
Home-page: http://www.openstack.org/
Author: OpenStack
Author-email: openstack-dev@lists.openstack.org
License: UNKNOWN
Description: ======
Daisy
======
Daisy is a project that defines services for discovering, registering,
retrieving and storing virtual machine images.
Use the following resources to learn more:
* `Official Daisy documentation <http://docs.openstack.org/developer/daisy/>`_
* `Official Client documentation <http://docs.openstack.org/developer/python-daisyclient/>`_
Platform: UNKNOWN
Classifier: Environment :: OpenStack
Classifier: Intended Audience :: Information Technology
Classifier: Intended Audience :: System Administrators
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Operating System :: POSIX :: Linux
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.7

View File

@ -1,525 +0,0 @@
.coveragerc
.mailmap
.testr.conf
AUTHORS
CONTRIBUTING.rst
ChangeLog
HACKING.rst
LICENSE
MANIFEST.in
README.rst
babel.cfg
openstack-common.conf
pylintrc
requirements.txt
run_tests.sh
setup.cfg
setup.py
test-requirements.txt
tox.ini
doc/source/architecture.rst
doc/source/authentication.rst
doc/source/cache.rst
doc/source/common-image-properties.rst
doc/source/conf.py
doc/source/configuring.rst
doc/source/controllingservers.rst
doc/source/db.rst
doc/source/formats.rst
doc/source/daisyapi.rst
doc/source/daisyclient.rst
doc/source/daisymetadefcatalogapi.rst
doc/source/identifiers.rst
doc/source/index.rst
doc/source/installing.rst
doc/source/metadefs-concepts.rst
doc/source/notifications.rst
doc/source/policies.rst
doc/source/property-protections.rst
doc/source/statuses.rst
doc/source/images/architecture.png
doc/source/images/image_status_transition.png
doc/source/images_src/architecture.graphml
doc/source/images_src/image_status_transition.dot
doc/source/images_src/image_status_transition.png
doc/source/man/footer.rst
doc/source/man/general_options.rst
doc/source/man/daisyapi.rst
doc/source/man/daisycachecleaner.rst
doc/source/man/daisycachemanage.rst
doc/source/man/daisycacheprefetcher.rst
doc/source/man/daisycachepruner.rst
doc/source/man/daisycontrol.rst
doc/source/man/daisymanage.rst
doc/source/man/daisyregistry.rst
doc/source/man/daisyreplicator.rst
doc/source/man/daisyscrubber.rst
doc/source/man/openstack_options.rst
etc/daisy-api-paste.ini
etc/daisy-api.conf
etc/daisy-cache.conf
etc/daisy-manage.conf
etc/daisy-registry-paste.ini
etc/daisy-registry.conf
etc/daisy-scrubber.conf
etc/daisy-search-paste.ini
etc/daisy-search.conf
etc/daisy-swift.conf.sample
etc/policy.json
etc/property-protections-policies.conf.sample
etc/property-protections-roles.conf.sample
etc/schema-image.json
etc/search-policy.json
etc/metadefs/README
etc/metadefs/compute-aggr-disk-filter.json
etc/metadefs/compute-aggr-iops-filter.json
etc/metadefs/compute-aggr-num-instances.json
etc/metadefs/compute-guest-shutdown.json
etc/metadefs/compute-host-capabilities.json
etc/metadefs/compute-hypervisor.json
etc/metadefs/compute-instance-data.json
etc/metadefs/compute-libvirt-image.json
etc/metadefs/compute-libvirt.json
etc/metadefs/compute-quota.json
etc/metadefs/compute-randomgen.json
etc/metadefs/compute-trust.json
etc/metadefs/compute-vcputopology.json
etc/metadefs/compute-vmware-flavor.json
etc/metadefs/compute-vmware-quota-flavor.json
etc/metadefs/compute-vmware.json
etc/metadefs/compute-watchdog.json
etc/metadefs/compute-xenapi.json
etc/metadefs/daisy-common-image-props.json
etc/metadefs/operating-system.json
etc/metadefs/software-databases.json
etc/metadefs/software-runtimes.json
etc/metadefs/software-webservers.json
etc/oslo-config-generator/daisy-api.conf
etc/oslo-config-generator/daisy-cache.conf
etc/oslo-config-generator/daisy-manage.conf
etc/oslo-config-generator/daisy-registry.conf
etc/oslo-config-generator/daisy-scrubber.conf
daisy/__init__.py
daisy/context.py
daisy/gateway.py
daisy/i18n.py
daisy/listener.py
daisy/location.py
daisy/notifier.py
daisy/opts.py
daisy/schema.py
daisy/scrubber.py
daisy/service.py
daisy/version.py
daisy.egg-info/PKG-INFO
daisy.egg-info/SOURCES.txt
daisy.egg-info/dependency_links.txt
daisy.egg-info/entry_points.txt
daisy.egg-info/not-zip-safe
daisy.egg-info/pbr.json
daisy.egg-info/requires.txt
daisy.egg-info/top_level.txt
daisy/api/__init__.py
daisy/api/authorization.py
daisy/api/cached_images.py
daisy/api/common.py
daisy/api/policy.py
daisy/api/property_protections.py
daisy/api/versions.py
daisy/api/middleware/__init__.py
daisy/api/middleware/cache.py
daisy/api/middleware/cache_manage.py
daisy/api/middleware/context.py
daisy/api/middleware/gzip.py
daisy/api/middleware/version_negotiation.py
daisy/api/v1/__init__.py
daisy/api/v1/controller.py
daisy/api/v1/filters.py
daisy/api/v1/images.py
daisy/api/v1/members.py
daisy/api/v1/router.py
daisy/api/v1/upload_utils.py
daisy/api/v2/__init__.py
daisy/api/v2/image_actions.py
daisy/api/v2/image_data.py
daisy/api/v2/image_members.py
daisy/api/v2/image_tags.py
daisy/api/v2/images.py
daisy/api/v2/metadef_namespaces.py
daisy/api/v2/metadef_objects.py
daisy/api/v2/metadef_properties.py
daisy/api/v2/metadef_resource_types.py
daisy/api/v2/metadef_tags.py
daisy/api/v2/router.py
daisy/api/v2/schemas.py
daisy/api/v2/tasks.py
daisy/api/v2/model/__init__.py
daisy/api/v2/model/metadef_namespace.py
daisy/api/v2/model/metadef_object.py
daisy/api/v2/model/metadef_property_item_type.py
daisy/api/v2/model/metadef_property_type.py
daisy/api/v2/model/metadef_resource_type.py
daisy/api/v2/model/metadef_tag.py
daisy/artifacts/__init__.py
daisy/async/__init__.py
daisy/async/taskflow_executor.py
daisy/async/utils.py
daisy/async/flows/__init__.py
daisy/async/flows/base_import.py
daisy/async/flows/convert.py
daisy/async/flows/introspect.py
daisy/cmd/__init__.py
daisy/cmd/agent_notification.py
daisy/cmd/api.py
daisy/cmd/cache_cleaner.py
daisy/cmd/cache_manage.py
daisy/cmd/cache_prefetcher.py
daisy/cmd/cache_pruner.py
daisy/cmd/control.py
daisy/cmd/index.py
daisy/cmd/manage.py
daisy/cmd/registry.py
daisy/cmd/replicator.py
daisy/cmd/scrubber.py
daisy/cmd/search.py
daisy/cmd/orchestration.py
daisy/common/__init__.py
daisy/common/auth.py
daisy/common/client.py
daisy/common/config.py
daisy/common/crypt.py
daisy/common/exception.py
daisy/common/jsonpatchvalidator.py
daisy/common/property_utils.py
daisy/common/rpc.py
daisy/common/semver_db.py
daisy/common/store_utils.py
daisy/common/swift_store_utils.py
daisy/common/utils.py
daisy/common/wsgi.py
daisy/common/wsme_utils.py
daisy/common/artifacts/__init__.py
daisy/common/artifacts/declarative.py
daisy/common/artifacts/definitions.py
daisy/common/artifacts/loader.py
daisy/common/artifacts/serialization.py
daisy/common/location_strategy/__init__.py
daisy/common/location_strategy/location_order.py
daisy/common/location_strategy/store_type.py
daisy/common/scripts/__init__.py
daisy/common/scripts/utils.py
daisy/common/scripts/image_import/__init__.py
daisy/common/scripts/image_import/main.py
daisy/contrib/__init__.py
daisy/contrib/plugins/__init__.py
daisy/contrib/plugins/artifacts_sample/__init__.py
daisy/contrib/plugins/artifacts_sample/base.py
daisy/contrib/plugins/artifacts_sample/setup.cfg
daisy/contrib/plugins/artifacts_sample/setup.py
daisy/contrib/plugins/artifacts_sample/v1/__init__.py
daisy/contrib/plugins/artifacts_sample/v1/artifact.py
daisy/contrib/plugins/artifacts_sample/v2/__init__.py
daisy/contrib/plugins/artifacts_sample/v2/artifact.py
daisy/contrib/plugins/image_artifact/__init__.py
daisy/contrib/plugins/image_artifact/requirements.txt
daisy/contrib/plugins/image_artifact/setup.cfg
daisy/contrib/plugins/image_artifact/setup.py
daisy/contrib/plugins/image_artifact/version_selector.py
daisy/contrib/plugins/image_artifact/v1/__init__.py
daisy/contrib/plugins/image_artifact/v1/image.py
daisy/contrib/plugins/image_artifact/v1_1/__init__.py
daisy/contrib/plugins/image_artifact/v1_1/image.py
daisy/contrib/plugins/image_artifact/v2/__init__.py
daisy/contrib/plugins/image_artifact/v2/image.py
daisy/db/__init__.py
daisy/db/metadata.py
daisy/db/migration.py
daisy/db/registry/__init__.py
daisy/db/registry/api.py
daisy/db/simple/__init__.py
daisy/db/simple/api.py
daisy/db/sqlalchemy/__init__.py
daisy/db/sqlalchemy/api.py
daisy/db/sqlalchemy/artifacts.py
daisy/db/sqlalchemy/metadata.py
daisy/db/sqlalchemy/models.py
daisy/db/sqlalchemy/models_artifacts.py
daisy/db/sqlalchemy/models_metadef.py
daisy/db/sqlalchemy/metadef_api/__init__.py
daisy/db/sqlalchemy/metadef_api/namespace.py
daisy/db/sqlalchemy/metadef_api/object.py
daisy/db/sqlalchemy/metadef_api/property.py
daisy/db/sqlalchemy/metadef_api/resource_type.py
daisy/db/sqlalchemy/metadef_api/resource_type_association.py
daisy/db/sqlalchemy/metadef_api/tag.py
daisy/db/sqlalchemy/metadef_api/utils.py
daisy/db/sqlalchemy/migrate_repo/README
daisy/db/sqlalchemy/migrate_repo/__init__.py
daisy/db/sqlalchemy/migrate_repo/manage.py
daisy/db/sqlalchemy/migrate_repo/migrate.cfg
daisy/db/sqlalchemy/migrate_repo/schema.py
daisy/db/sqlalchemy/migrate_repo/versions/001_add_images_table.py
daisy/db/sqlalchemy/migrate_repo/versions/002_add_image_properties_table.py
daisy/db/sqlalchemy/migrate_repo/versions/003_add_disk_format.py
daisy/db/sqlalchemy/migrate_repo/versions/003_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/003_sqlite_upgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/004_add_checksum.py
daisy/db/sqlalchemy/migrate_repo/versions/005_size_big_integer.py
daisy/db/sqlalchemy/migrate_repo/versions/006_key_to_name.py
daisy/db/sqlalchemy/migrate_repo/versions/006_mysql_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/006_mysql_upgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/006_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/006_sqlite_upgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/007_add_owner.py
daisy/db/sqlalchemy/migrate_repo/versions/008_add_image_members_table.py
daisy/db/sqlalchemy/migrate_repo/versions/009_add_mindisk_and_minram.py
daisy/db/sqlalchemy/migrate_repo/versions/010_default_update_at.py
daisy/db/sqlalchemy/migrate_repo/versions/011_make_mindisk_and_minram_notnull.py
daisy/db/sqlalchemy/migrate_repo/versions/011_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/011_sqlite_upgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/012_id_to_uuid.py
daisy/db/sqlalchemy/migrate_repo/versions/013_add_protected.py
daisy/db/sqlalchemy/migrate_repo/versions/013_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/014_add_image_tags_table.py
daisy/db/sqlalchemy/migrate_repo/versions/015_quote_swift_credentials.py
daisy/db/sqlalchemy/migrate_repo/versions/016_add_status_image_member.py
daisy/db/sqlalchemy/migrate_repo/versions/016_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/017_quote_encrypted_swift_credentials.py
daisy/db/sqlalchemy/migrate_repo/versions/018_add_image_locations_table.py
daisy/db/sqlalchemy/migrate_repo/versions/019_migrate_image_locations.py
daisy/db/sqlalchemy/migrate_repo/versions/020_drop_images_table_location.py
daisy/db/sqlalchemy/migrate_repo/versions/021_set_engine_mysql_innodb.py
daisy/db/sqlalchemy/migrate_repo/versions/022_image_member_index.py
daisy/db/sqlalchemy/migrate_repo/versions/023_placeholder.py
daisy/db/sqlalchemy/migrate_repo/versions/024_placeholder.py
daisy/db/sqlalchemy/migrate_repo/versions/025_placeholder.py
daisy/db/sqlalchemy/migrate_repo/versions/026_add_location_storage_information.py
daisy/db/sqlalchemy/migrate_repo/versions/027_checksum_index.py
daisy/db/sqlalchemy/migrate_repo/versions/028_owner_index.py
daisy/db/sqlalchemy/migrate_repo/versions/029_location_meta_data_pickle_to_string.py
daisy/db/sqlalchemy/migrate_repo/versions/030_add_tasks_table.py
daisy/db/sqlalchemy/migrate_repo/versions/031_remove_duplicated_locations.py
daisy/db/sqlalchemy/migrate_repo/versions/032_add_task_info_table.py
daisy/db/sqlalchemy/migrate_repo/versions/033_add_location_status.py
daisy/db/sqlalchemy/migrate_repo/versions/034_add_virtual_size.py
daisy/db/sqlalchemy/migrate_repo/versions/035_add_metadef_tables.py
daisy/db/sqlalchemy/migrate_repo/versions/036_rename_metadef_schema_columns.py
daisy/db/sqlalchemy/migrate_repo/versions/037_add_changes_to_satisfy_models.py
daisy/db/sqlalchemy/migrate_repo/versions/037_sqlite_downgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/037_sqlite_upgrade.sql
daisy/db/sqlalchemy/migrate_repo/versions/038_add_metadef_tags_table.py
daisy/db/sqlalchemy/migrate_repo/versions/039_add_changes_to_satisfy_models_metadef.py
daisy/db/sqlalchemy/migrate_repo/versions/040_add_changes_to_satisfy_metadefs_tags.py
daisy/db/sqlalchemy/migrate_repo/versions/041_add_artifact_tables.py
daisy/db/sqlalchemy/migrate_repo/versions/__init__.py
daisy/domain/__init__.py
daisy/domain/proxy.py
daisy/hacking/__init__.py
daisy/hacking/checks.py
daisy/image_cache/__init__.py
daisy/image_cache/base.py
daisy/image_cache/cleaner.py
daisy/image_cache/client.py
daisy/image_cache/prefetcher.py
daisy/image_cache/pruner.py
daisy/image_cache/drivers/__init__.py
daisy/image_cache/drivers/base.py
daisy/image_cache/drivers/sqlite.py
daisy/image_cache/drivers/xattr.py
daisy/locale/daisy-log-critical.pot
daisy/locale/daisy-log-error.pot
daisy/locale/daisy-log-info.pot
daisy/locale/daisy-log-warning.pot
daisy/locale/daisy.pot
daisy/locale/en_GB/LC_MESSAGES/daisy-log-info.po
daisy/locale/fr/LC_MESSAGES/daisy-log-info.po
daisy/locale/pt_BR/LC_MESSAGES/daisy-log-info.po
daisy/openstack/__init__.py
daisy/openstack/common/README
daisy/openstack/common/__init__.py
daisy/openstack/common/_i18n.py
daisy/openstack/common/eventlet_backdoor.py
daisy/openstack/common/fileutils.py
daisy/openstack/common/local.py
daisy/openstack/common/loopingcall.py
daisy/openstack/common/service.py
daisy/openstack/common/systemd.py
daisy/openstack/common/threadgroup.py
daisy/quota/__init__.py
daisy/registry/__init__.py
daisy/registry/api/__init__.py
daisy/registry/api/v1/__init__.py
daisy/registry/api/v1/images.py
daisy/registry/api/v1/members.py
daisy/registry/api/v2/__init__.py
daisy/registry/api/v2/rpc.py
daisy/registry/client/__init__.py
daisy/registry/client/v1/__init__.py
daisy/registry/client/v1/api.py
daisy/registry/client/v1/client.py
daisy/registry/client/v2/__init__.py
daisy/registry/client/v2/api.py
daisy/registry/client/v2/client.py
daisy/search/__init__.py
daisy/search/api/__init__.py
daisy/search/api/v0_1/__init__.py
daisy/search/api/v0_1/router.py
daisy/search/api/v0_1/search.py
daisy/search/plugins/__init__.py
daisy/search/plugins/base.py
daisy/search/plugins/images.py
daisy/search/plugins/images_notification_handler.py
daisy/search/plugins/metadefs.py
daisy/search/plugins/metadefs_notification_handler.py
daisy/orchestration/__init__.py
daisy/orchestration/manager.py
daisy/tests/__init__.py
daisy/tests/stubs.py
daisy/tests/test_hacking.py
daisy/tests/utils.py
daisy/tests/etc/daisy-swift.conf
daisy/tests/etc/policy.json
daisy/tests/etc/property-protections-policies.conf
daisy/tests/etc/property-protections.conf
daisy/tests/etc/schema-image.json
daisy/tests/functional/__init__.py
daisy/tests/functional/store_utils.py
daisy/tests/functional/test_api.py
daisy/tests/functional/test_bin_daisy_cache_manage.py
daisy/tests/functional/test_cache_middleware.py
daisy/tests/functional/test_client_exceptions.py
daisy/tests/functional/test_client_redirects.py
daisy/tests/functional/test_daisy_manage.py
daisy/tests/functional/test_gzip_middleware.py
daisy/tests/functional/test_logging.py
daisy/tests/functional/test_reload.py
daisy/tests/functional/test_scrubber.py
daisy/tests/functional/test_sqlite.py
daisy/tests/functional/test_ssl.py
daisy/tests/functional/db/__init__.py
daisy/tests/functional/db/base.py
daisy/tests/functional/db/base_artifacts.py
daisy/tests/functional/db/base_metadef.py
daisy/tests/functional/db/test_registry.py
daisy/tests/functional/db/test_rpc_endpoint.py
daisy/tests/functional/db/test_simple.py
daisy/tests/functional/db/test_sqlalchemy.py
daisy/tests/functional/v1/__init__.py
daisy/tests/functional/v1/test_api.py
daisy/tests/functional/v1/test_copy_to_file.py
daisy/tests/functional/v1/test_misc.py
daisy/tests/functional/v1/test_multiprocessing.py
daisy/tests/functional/v2/__init__.py
daisy/tests/functional/v2/registry_data_api.py
daisy/tests/functional/v2/test_images.py
daisy/tests/functional/v2/test_metadef_namespaces.py
daisy/tests/functional/v2/test_metadef_objects.py
daisy/tests/functional/v2/test_metadef_properties.py
daisy/tests/functional/v2/test_metadef_resourcetypes.py
daisy/tests/functional/v2/test_metadef_tags.py
daisy/tests/functional/v2/test_schemas.py
daisy/tests/functional/v2/test_tasks.py
daisy/tests/integration/__init__.py
daisy/tests/integration/legacy_functional/__init__.py
daisy/tests/integration/legacy_functional/base.py
daisy/tests/integration/legacy_functional/test_v1_api.py
daisy/tests/integration/v2/__init__.py
daisy/tests/integration/v2/base.py
daisy/tests/integration/v2/test_property_quota_violations.py
daisy/tests/integration/v2/test_tasks_api.py
daisy/tests/unit/__init__.py
daisy/tests/unit/base.py
daisy/tests/unit/fake_rados.py
daisy/tests/unit/test_artifact_type_definition_framework.py
daisy/tests/unit/test_artifacts_plugin_loader.py
daisy/tests/unit/test_auth.py
daisy/tests/unit/test_cache_middleware.py
daisy/tests/unit/test_cached_images.py
daisy/tests/unit/test_context.py
daisy/tests/unit/test_context_middleware.py
daisy/tests/unit/test_db.py
daisy/tests/unit/test_db_metadef.py
daisy/tests/unit/test_domain.py
daisy/tests/unit/test_domain_proxy.py
daisy/tests/unit/test_gateway.py
daisy/tests/unit/test_daisy_replicator.py
daisy/tests/unit/test_image_cache.py
daisy/tests/unit/test_image_cache_client.py
daisy/tests/unit/test_jsonpatchmixin.py
daisy/tests/unit/test_manage.py
daisy/tests/unit/test_migrations.py
daisy/tests/unit/test_misc.py
daisy/tests/unit/test_notifier.py
daisy/tests/unit/test_opts.py
daisy/tests/unit/test_policy.py
daisy/tests/unit/test_quota.py
daisy/tests/unit/test_schema.py
daisy/tests/unit/test_scrubber.py
daisy/tests/unit/test_search.py
daisy/tests/unit/test_store_image.py
daisy/tests/unit/test_store_location.py
daisy/tests/unit/test_versions.py
daisy/tests/unit/utils.py
daisy/tests/unit/api/__init__.py
daisy/tests/unit/api/test_cmd.py
daisy/tests/unit/api/test_cmd_cache_manage.py
daisy/tests/unit/api/test_common.py
daisy/tests/unit/api/test_property_protections.py
daisy/tests/unit/api/middleware/__init__.py
daisy/tests/unit/api/middleware/test_cache_manage.py
daisy/tests/unit/async/__init__.py
daisy/tests/unit/async/test_async.py
daisy/tests/unit/async/test_taskflow_executor.py
daisy/tests/unit/async/flows/__init__.py
daisy/tests/unit/async/flows/test_convert.py
daisy/tests/unit/async/flows/test_import.py
daisy/tests/unit/async/flows/test_introspect.py
daisy/tests/unit/common/__init__.py
daisy/tests/unit/common/test_client.py
daisy/tests/unit/common/test_config.py
daisy/tests/unit/common/test_exception.py
daisy/tests/unit/common/test_location_strategy.py
daisy/tests/unit/common/test_property_utils.py
daisy/tests/unit/common/test_rpc.py
daisy/tests/unit/common/test_scripts.py
daisy/tests/unit/common/test_semver.py
daisy/tests/unit/common/test_swift_store_utils.py
daisy/tests/unit/common/test_utils.py
daisy/tests/unit/common/test_wsgi.py
daisy/tests/unit/common/test_wsgi_ipv6.py
daisy/tests/unit/common/scripts/__init__.py
daisy/tests/unit/common/scripts/test_scripts_utils.py
daisy/tests/unit/common/scripts/image_import/__init__.py
daisy/tests/unit/common/scripts/image_import/test_main.py
daisy/tests/unit/v0_1/test_search.py
daisy/tests/unit/v1/__init__.py
daisy/tests/unit/v1/test_api.py
daisy/tests/unit/v1/test_registry_api.py
daisy/tests/unit/v1/test_registry_client.py
daisy/tests/unit/v1/test_upload_utils.py
daisy/tests/unit/v2/__init__.py
daisy/tests/unit/v2/test_image_actions_resource.py
daisy/tests/unit/v2/test_image_data_resource.py
daisy/tests/unit/v2/test_image_members_resource.py
daisy/tests/unit/v2/test_image_tags_resource.py
daisy/tests/unit/v2/test_images_resource.py
daisy/tests/unit/v2/test_metadef_resources.py
daisy/tests/unit/v2/test_registry_api.py
daisy/tests/unit/v2/test_registry_client.py
daisy/tests/unit/v2/test_schemas_resource.py
daisy/tests/unit/v2/test_tasks_resource.py
daisy/tests/var/ca.crt
daisy/tests/var/ca.key
daisy/tests/var/certificate.crt
daisy/tests/var/privatekey.key
rally-jobs/README.rst
rally-jobs/daisy.yaml
rally-jobs/extra/README.rst
rally-jobs/extra/fake.img
rally-jobs/plugins/README.rst
rally-jobs/plugins/plugin_sample.py
tools/colorizer.py
tools/install_venv.py
tools/install_venv_common.py
tools/migrate_image_owners.py
tools/with_venv.sh

View File

@ -1,43 +0,0 @@
[console_scripts]
daisy-api = daisy.cmd.api:main
daisy-cache-cleaner = daisy.cmd.cache_cleaner:main
daisy-cache-manage = daisy.cmd.cache_manage:main
daisy-cache-prefetcher = daisy.cmd.cache_prefetcher:main
daisy-cache-pruner = daisy.cmd.cache_pruner:main
daisy-control = daisy.cmd.control:main
daisy-index = daisy.cmd.index:main
daisy-manage = daisy.cmd.manage:main
daisy-registry = daisy.cmd.registry:main
daisy-replicator = daisy.cmd.replicator:main
daisy-scrubber = daisy.cmd.scrubber:main
daisy-search = daisy.cmd.search:main
daisy-orchestration = daisy.cmd.orchestration:main
[daisy.common.image_location_strategy.modules]
location_order_strategy = daisy.common.location_strategy.location_order
store_type_strategy = daisy.common.location_strategy.store_type
[daisy.database.metadata_backend]
sqlalchemy = daisy.db.sqlalchemy.metadata
[daisy.database.migration_backend]
sqlalchemy = oslo.db.sqlalchemy.migration
[daisy.flows]
import = daisy.async.flows.base_import:get_flow
[daisy.flows.import]
convert = daisy.async.flows.convert:get_flow
introspect = daisy.async.flows.introspect:get_flow
[daisy.search.index_backend]
image = daisy.search.plugins.images:ImageIndex
metadef = daisy.search.plugins.metadefs:MetadefIndex
[oslo.config.opts]
daisy.api = daisy.opts:list_api_opts
daisy.cache = daisy.opts:list_cache_opts
daisy.manage = daisy.opts:list_manage_opts
daisy.registry = daisy.opts:list_registry_opts
daisy.scrubber = daisy.opts:list_scrubber_opts

View File

@ -1 +0,0 @@

View File

@ -1 +0,0 @@
{"is_release": true, "git_version": "93b0d5f"}

View File

@ -1,40 +0,0 @@
pbr>=0.6,!=0.7,<1.0
greenlet>=0.3.2
SQLAlchemy>=0.9.7,<=0.9.99
anyjson>=0.3.3
eventlet>=0.16.1,!=0.17.0
PasteDeploy>=1.5.0
Routes>=1.12.3,!=2.0
WebOb>=1.2.3
sqlalchemy-migrate>=0.9.5
httplib2>=0.7.5
kombu>=2.5.0
pycrypto>=2.6
iso8601>=0.1.9
ordereddict
oslo_config>=1.9.3,<1.10.0 # Apache-2.0
oslo_concurrency>=1.8.0,<1.9.0 # Apache-2.0
oslo_context>=0.2.0,<0.3.0 # Apache-2.0
oslo_utils>=1.4.0,<1.5.0 # Apache-2.0
stevedore>=1.3.0,<1.4.0 # Apache-2.0
taskflow>=0.7.1,<0.8.0
keystonemiddleware>=1.5.0,<1.6.0
WSME>=0.6
posix_ipc
python-swiftclient>=2.2.0,<2.5.0
oslo_vmware>=0.11.1,<0.12.0 # Apache-2.0
Paste
jsonschema>=2.0.0,<3.0.0
python-keystoneclient>=1.1.0,<1.4.0
pyOpenSSL>=0.11
six>=1.9.0
oslo_db>=1.7.0,<1.8.0 # Apache-2.0
oslo_i18n>=1.5.0,<1.6.0 # Apache-2.0
oslo_log>=1.0.0,<1.1.0 # Apache-2.0
oslo_messaging>=1.8.0,<1.9.0 # Apache-2.0
oslo_policy>=0.3.1,<0.4.0 # Apache-2.0
oslo_serialization>=1.4.0,<1.5.0 # Apache-2.0
retrying>=1.2.3,!=1.3.0 # Apache-2.0
osprofiler>=0.3.0 # Apache-2.0
glance_store>=0.3.0,<0.5.0 # Apache-2.0
semantic_version>=2.3.1

View File

@ -1 +0,0 @@
daisy

View File

@ -40,7 +40,6 @@ from daisy.common import exception
from daisy.common import utils
from daisy.db import migration as db_migration
from daisy.db.sqlalchemy import api as db_api
from daisy.db.sqlalchemy import metadata
from daisy import i18n
# If ../glance/__init__.py exists, add ../ to Python search path, so that
@ -115,38 +114,6 @@ class DbCommands(object):
db_migration.MIGRATE_REPO_PATH,
version)
@args('--path', metavar='<path>', help='Path to the directory or file '
'where json metadata is stored')
@args('--merge', action='store_true',
help='Merge files with data that is in the database. By default it '
'prefers existing data over new. This logic can be changed by '
'combining --merge option with one of these two options: '
'--prefer_new or --overwrite.')
@args('--prefer_new', action='store_true',
help='Prefer new metadata over existing. Existing metadata '
'might be overwritten. Needs to be combined with --merge '
'option.')
@args('--overwrite', action='store_true',
help='Drop and rewrite metadata. Needs to be combined with --merge '
'option')
def load_metadefs(self, path=None, merge=False,
prefer_new=False, overwrite=False):
"""Load metadefinition json files to database"""
metadata.db_load_metadefs(db_api.get_engine(), path, merge,
prefer_new, overwrite)
def unload_metadefs(self):
"""Unload metadefinitions from database"""
metadata.db_unload_metadefs(db_api.get_engine())
@args('--path', metavar='<path>', help='Path to the directory where '
'json metadata files should be '
'saved.')
def export_metadefs(self, path=None):
"""Export metadefinitions data from database to files"""
metadata.db_export_metadefs(db_api.get_engine(),
path)
class DbLegacyCommands(object):
"""Class for managing the db using legacy commands"""

View File

@ -1,749 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import re
import types
import semantic_version
import six
from daisy.common import exception as exc
from daisy import i18n
_ = i18n._
class AttributeDefinition(object):
    """A base class for the attribute definitions which may be added to
    declaratively defined artifact types.

    Holds common metadata (display name, description, mutability flags,
    default value) and a small named-validator registry that subclasses
    extend via ``_add_validator``/``_remove_validator``.
    """

    # Types accepted by the built-in 'type' validator; subclasses narrow this.
    ALLOWED_TYPES = (object,)

    def __init__(self,
                 display_name=None,
                 description=None,
                 readonly=False,
                 mutable=True,
                 required=False,
                 default=None):
        """Initializes attribute definition.

        :param display_name: Display name of the attribute
        :param description: Description of the attribute
        :param readonly: Flag indicating if the value of attribute may not be
                         changed once an artifact is created
        :param mutable: Flag indicating if the value of attribute may not be
                        changed once an artifact is published
        :param required: Flag indicating if the value of attribute is required
        :param default: default value of the attribute
        """
        self.name = None
        self.display_name = display_name
        self.description = description
        self.readonly = readonly
        self.required = required
        self.mutable = mutable
        self.default = default
        self._add_validator('type',
                            lambda v: isinstance(v, self.ALLOWED_TYPES),
                            _("Not a valid value type"))
        self._validate_default()

    def _set_name(self, value):
        # Fall back to the attribute name as display name when none was given.
        self.name = value
        if self.display_name is None:
            self.display_name = value

    def _add_validator(self, name, func, message):
        # Lazily create the registry so subclasses may call this from their
        # own __init__ before super().__init__ runs.
        if not hasattr(self, '_validators'):
            self._validators = []
            self._validators_index = {}
        pair = (func, message)
        self._validators.append(pair)
        self._validators_index[name] = pair

    def _get_validator(self, name):
        return self._validators_index.get(name)

    def _remove_validator(self, name):
        pair = self._validators_index.pop(name, None)
        if pair is not None:
            self._validators.remove(pair)

    def _check_definition(self):
        self._validate_default()

    def _validate_default(self):
        # NOTE: falsy-but-valid defaults (0, '', False) are not validated
        # here because of the truthiness check; preserved as-is.
        if self.default:
            try:
                self.validate(self.default, 'default')
            except exc.InvalidArtifactPropertyValue:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _("Default value is invalid"))

    def get_value(self, obj):
        return getattr(obj, self.name)

    def set_value(self, obj, value):
        return setattr(obj, self.name, value)

    def validate(self, value, name=None):
        """Run all registered validators against *value*.

        :param value: the value to check; ``None`` is only rejected when the
                      attribute is required
        :param name: optional name to report in errors instead of self.name
        :raises exc.InvalidArtifactPropertyValue: on the first failing
                validator (registration order)
        """
        if value is None:
            if self.required:
                raise exc.InvalidArtifactPropertyValue(
                    name=name or self.name,
                    val=value,
                    msg=_('Value is required'))
            else:
                return

        first_error = next((msg for v_func, msg in self._validators
                            if not v_func(value)), None)
        if first_error:
            raise exc.InvalidArtifactPropertyValue(name=name or self.name,
                                                   val=value,
                                                   msg=first_error)
class ListAttributeDefinition(AttributeDefinition):
    """A base class for Attribute definitions having List-semantics.

    Is inherited by Array, ArtifactReferenceList and BinaryObjectList.

    Supports two modes: a single item type applied to every element (with
    optional min/max size and uniqueness constraints), or a list of item
    types giving tuple semantics (fixed length, per-position types).
    """

    # `list` rather than Python-2-only `types.ListType` (same object on
    # Python 2, and keeps the class importable on Python 3).
    ALLOWED_TYPES = (list,)
    ALLOWED_ITEM_TYPES = (AttributeDefinition, )

    def _check_item_type(self, item):
        if not isinstance(item, self.ALLOWED_ITEM_TYPES):
            raise exc.InvalidArtifactTypePropertyDefinition(
                _('Invalid item type specification'))
        if item.default is not None:
            # Typo fix: original message read "may hot have defaults".
            raise exc.InvalidArtifactTypePropertyDefinition(
                _('List definitions may not have defaults'))

    def __init__(self, item_type, min_size=0, max_size=None, unique=False,
                 **kwargs):
        """Initialize a list-typed attribute definition.

        :param item_type: a single AttributeDefinition applied to all
                          elements, or a list of them (tuple semantics)
        :param min_size: minimum list length (list mode only)
        :param max_size: maximum list length (list mode only)
        :param unique: require all elements to be distinct
        :raises exc.InvalidArtifactTypePropertyDefinition: on invalid item
                types, or if min/max size is given in tuple mode
        """
        super(ListAttributeDefinition, self).__init__(**kwargs)
        if isinstance(item_type, list):
            for it in item_type:
                self._check_item_type(it)

            # we need to copy the item_type collection
            self.item_type = item_type[:]

            if min_size != 0:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _("Cannot specify 'min_size' explicitly")
                )

            if max_size is not None:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _("Cannot specify 'max_size' explicitly")
                )

            # setting max_size and min_size to the length of item_type,
            # as tuple-semantic assumes that the number of elements is set
            # by the type spec
            min_size = max_size = len(item_type)
        else:
            self._check_item_type(item_type)
            self.item_type = item_type

        if min_size:
            self.min_size(min_size)

        if max_size:
            self.max_size(max_size)

        if unique:
            self.unique()

    def min_size(self, value):
        self._min_size = value
        if value is not None:
            self._add_validator('min_size',
                                lambda v: len(v) >= self._min_size,
                                _('List size is less than minimum'))
        else:
            self._remove_validator('min_size')

    def max_size(self, value):
        self._max_size = value
        if value is not None:
            self._add_validator('max_size',
                                lambda v: len(v) <= self._max_size,
                                _('List size is greater than maximum'))
        else:
            self._remove_validator('max_size')

    def unique(self, value=True):
        self._unique = value
        if value:
            def _unique(items):
                seen = set()
                for item in items:
                    if item in seen:
                        return False
                    seen.add(item)
                return True
            self._add_validator('unique',
                                _unique, _('Items have to be unique'))
        else:
            self._remove_validator('unique')

    def _set_name(self, value):
        super(ListAttributeDefinition, self)._set_name(value)
        if isinstance(self.item_type, list):
            for i, item in enumerate(self.item_type):
                item._set_name("%s[%i]" % (value, i))
        else:
            self.item_type._set_name("%s[*]" % value)

    def validate(self, value, name=None):
        super(ListAttributeDefinition, self).validate(value, name)
        if value is not None:
            for i, item in enumerate(value):
                self._validate_item_at(item, i)

    def get_item_definition_at_index(self, index):
        """Return the definition for position *index*, or None if out of
        range in tuple mode; in single-type mode always the item type."""
        if isinstance(self.item_type, list):
            if index < len(self.item_type):
                return self.item_type[index]
            else:
                return None
        return self.item_type

    def _validate_item_at(self, item, index):
        item_type = self.get_item_definition_at_index(index)
        # set name if none has been given to the list element at given index
        if (isinstance(self.item_type, list) and item_type and
                not item_type.name):
            item_type.name = "%s[%i]" % (self.name, index)
        if item_type:
            item_type.validate(item)
class DictAttributeDefinition(AttributeDefinition):
    """A base class for Attribute definitions having Map-semantics

    Is inherited by Dict
    """
    ALLOWED_TYPES = (types.DictionaryType,)
    ALLOWED_PROPERTY_TYPES = (AttributeDefinition,)

    def _check_prop(self, key, item):
        # Values must be attribute definitions; keys, when given, must be
        # strings.
        if (not isinstance(item, self.ALLOWED_PROPERTY_TYPES) or
                (key is not None and not isinstance(key, types.StringTypes))):
            raise exc.InvalidArtifactTypePropertyDefinition(
                _('Invalid dict property type specification'))

    @staticmethod
    def _validate_key(key):
        if not isinstance(key, types.StringTypes):
            raise exc.InvalidArtifactPropertyValue(
                _('Invalid dict property type'))

    def __init__(self, properties, min_properties=0, max_properties=0,
                 **kwargs):
        """Initialize the map-like attribute definition.

        :param properties: either a single AttributeDefinition (applied to
            every value) or a dict mapping fixed keys to definitions
        :param min_properties: minimum number of entries (0 = no check)
        :param max_properties: maximum number of entries (0 = no check)
        """
        super(DictAttributeDefinition, self).__init__(**kwargs)
        if isinstance(properties, types.DictionaryType):
            for key, value in six.iteritems(properties):
                self._check_prop(key, value)
            # copy the properties dict so later external mutation cannot
            # affect the validator closure registered below
            self.properties = properties.copy()
            # fixed-schema dicts may not contain keys outside the schema
            self._add_validator('keys',
                                lambda v: set(v.keys()) <= set(
                                    self.properties.keys()),
                                _('Dictionary contains unexpected key(s)'))
        else:
            self._check_prop(None, properties)
            self.properties = properties
        if min_properties:
            self.min_properties(min_properties)
        if max_properties:
            self.max_properties(max_properties)

    def min_properties(self, value):
        """Set (or clear, with None) the minimum entry count."""
        self._min_properties = value
        if value is not None:
            self._add_validator('min_properties',
                                lambda v: len(v) >= self._min_properties,
                                _('Dictionary size is less than '
                                  'minimum'))
        else:
            self._remove_validator('min_properties')

    def max_properties(self, value):
        """Set (or clear, with None) the maximum entry count."""
        self._max_properties = value
        if value is not None:
            self._add_validator('max_properties',
                                lambda v: len(v) <= self._max_properties,
                                _('Dictionary size is '
                                  'greater than maximum'))
        else:
            self._remove_validator('max_properties')

    def _set_name(self, value):
        # Propagate the attribute name down to the nested definitions.
        super(DictAttributeDefinition, self)._set_name(value)
        if isinstance(self.properties, types.DictionaryType):
            for k, v in six.iteritems(self.properties):
                v._set_name(value)
        else:
            self.properties._set_name(value)

    def validate(self, value, name=None):
        super(DictAttributeDefinition, self).validate(value, name)
        if value is not None:
            for k, v in six.iteritems(value):
                self._validate_item_with_key(v, k)

    def _validate_item_with_key(self, value, key):
        # Keys are validated first, then the value against the definition
        # governing that key (per-key for fixed schemas).
        self._validate_key(key)
        if isinstance(self.properties, types.DictionaryType):
            prop_def = self.properties.get(key)
            if prop_def is not None:
                name = "%s[%s]" % (prop_def.name, key)
                prop_def.validate(value, name=name)
        else:
            name = "%s[%s]" % (self.properties.name, key)
            self.properties.validate(value, name=name)

    def get_prop_definition_at_key(self, key):
        # Returns the definition governing *key*; unknown keys of a
        # fixed-schema dict yield None.
        if isinstance(self.properties, types.DictionaryType):
            return self.properties.get(key)
        else:
            return self.properties
class PropertyDefinition(AttributeDefinition):
    """A base class for Attributes defining generic or type-specific metadata
    properties
    """
    DB_TYPE = None

    def __init__(self,
                 internal=False,
                 allowed_values=None,
                 validators=None,
                 **kwargs):
        """Defines a metadata property

        :param internal: a flag indicating that the property is internal, i.e.
        not returned to client
        :param allowed_values: specifies a list of values allowed for the
        property
        :param validators: specifies a list of custom validators for the
        property
        """
        super(PropertyDefinition, self).__init__(**kwargs)
        self.internal = internal
        self._allowed_values = None
        if validators is not None:
            try:
                for i, (f, m) in enumerate(validators):
                    self._add_validator("custom_%i" % i, f, m)
            except ValueError:
                # raised when an entry does not unpack into (f, m)
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _("Custom validators list should contain tuples "
                      "'(function, message)'"))
        if allowed_values is not None:
            self.allowed_values(allowed_values)
        self._check_definition()

    def _validate_allowed_values(self):
        # Every allowed value must itself satisfy the other validators,
        # otherwise the definition is self-contradictory.
        if self._allowed_values:
            try:
                for allowed_value in self._allowed_values:
                    self.validate(allowed_value, 'allowed_value')
            except exc.InvalidArtifactPropertyValue:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _("Allowed values %s are invalid under given validators") %
                    self._allowed_values)

    def allowed_values(self, values):
        """Set the list of allowed values, or clear it by passing None.

        BUG FIX: the copy ``values[:]`` used to be taken before the None
        check, so clearing with None raised TypeError and the
        remove-validator branch was unreachable.
        """
        if values is not None:
            # copy the list, as the validator below closes over it and
            # external modification must not affect the created validator
            self._allowed_values = values[:]
            self._add_validator('allowed', lambda v: v in self._allowed_values,
                                _("Is not allowed value"))
        else:
            self._allowed_values = None
            self._remove_validator('allowed')
        self._check_definition()

    def _check_definition(self):
        self._validate_allowed_values()
        super(PropertyDefinition, self)._check_definition()
class RelationDefinition(AttributeDefinition):
    """Base class for attributes that reference other artifacts."""

    def __init__(self, internal=False, **kwargs):
        self.internal = internal
        # Relations are always immutable: an absent flag defaults to
        # False, and an explicit mutable=True is rejected outright.
        mutable_flag = kwargs.setdefault('mutable', False)
        if mutable_flag is True:
            raise exc.InvalidArtifactTypePropertyDefinition(
                _("Dependency relations cannot be mutable"))
        super(RelationDefinition, self).__init__(**kwargs)
class BlobDefinition(AttributeDefinition):
    """Base class for attributes binding binary objects to a type."""
    pass
class ArtifactTypeMetaclass(type):
    """A metaclass to build Artifact Types. Not intended to be used directly

    Use `get_declarative_base` to get the base class instead
    """
    def __init__(cls, class_name, bases, attributes):
        # The marker is present only in the declarative base's own dict
        # (see get_declarative_base), so metadata is built for every
        # subclass, i.e. every concrete artifact type.
        if '_declarative_artifact_type' not in cls.__dict__:
            _build_declarative_meta(cls)
        super(ArtifactTypeMetaclass, cls).__init__(class_name, bases,
                                                   attributes)
class ArtifactPropertyDescriptor(object):
    """A descriptor object for working with artifact attributes"""

    def __init__(self, prop, collection_wrapper_class=None):
        # prop: the AttributeDefinition exposed by this descriptor.
        # collection_wrapper_class: validating list/dict wrapper used for
        # collection-valued properties (see _build_declarative_meta).
        self.prop = prop
        self.collection_wrapper_class = collection_wrapper_class

    def __get__(self, instance, owner):
        if instance is None:
            # accessed via owner class
            return self.prop
        else:
            v = getattr(instance, '_' + self.prop.name, None)
            if v is None and self.prop.default is not None:
                # lazily materialize the default, copied so instances do
                # not share a single mutable default object
                v = copy.copy(self.prop.default)
                self.__set__(instance, v, ignore_mutability=True)
                return self.__get__(instance, owner)
            else:
                if v is not None and self.collection_wrapper_class:
                    # decide whether the returned collection must be frozen
                    # for this caller
                    if self.prop.readonly:
                        readonly = True
                    elif (not self.prop.mutable and
                          hasattr(instance, '__is_mutable__') and
                          not hasattr(instance,
                                      '__suspend_mutability_checks__')):
                        readonly = not instance.__is_mutable__()
                    else:
                        readonly = False
                    if readonly:
                        v = v.__make_immutable__()
                return v

    def __set__(self, instance, value, ignore_mutability=False):
        if instance:
            if self.prop.readonly:
                # a readonly property may be written exactly once (the
                # backing attribute does not exist before the first write)
                if hasattr(instance, '_' + self.prop.name):
                    raise exc.InvalidArtifactPropertyValue(
                        _('Attempt to set readonly property'))
            if not self.prop.mutable:
                if (hasattr(instance, '__is_mutable__') and
                        not hasattr(instance,
                                    '__suspend_mutability_checks__')):
                    mutable = instance.__is_mutable__() or ignore_mutability
                    if not mutable:
                        raise exc.InvalidArtifactPropertyValue(
                            _('Attempt to set value of immutable property'))
            if value is not None and self.collection_wrapper_class:
                # wrap collections so later in-place mutations are
                # validated as well
                value = self.collection_wrapper_class(value)
                value.property = self.prop
            self.prop.validate(value)
            setattr(instance, '_' + self.prop.name, value)
class ArtifactAttributes(object):
    """A container class storing description of Artifact Type attributes"""

    def __init__(self):
        # name -> definition maps split by attribute kind; ``all`` holds
        # every attribute regardless of kind
        self.properties = {}
        self.dependencies = {}
        self.blobs = {}
        self.all = {}

    @property
    def default_dependency(self):
        """Returns the default dependency relation for an artifact type"""
        # Only meaningful when the type declares exactly one dependency;
        # otherwise returns None.
        if len(self.dependencies) == 1:
            return self.dependencies.values()[0]

    @property
    def default_blob(self):
        """Returns the default blob object for an artifact type"""
        if len(self.blobs) == 1:
            return self.blobs.values()[0]

    @property
    def default_properties_dict(self):
        """Returns a default properties dict for an artifact type"""
        dict_props = [v for v in self.properties.values() if
                      isinstance(v, DictAttributeDefinition)]
        if len(dict_props) == 1:
            return dict_props[0]

    @property
    def tags(self):
        """Returns tags property for an artifact type"""
        return self.properties.get('tags')

    def add(self, attribute):
        # Register in ``all`` plus the matching kind-specific map.
        self.all[attribute.name] = attribute
        if isinstance(attribute, PropertyDefinition):
            self.properties[attribute.name] = attribute
        elif isinstance(attribute, BlobDefinition):
            self.blobs[attribute.name] = attribute
        elif isinstance(attribute, RelationDefinition):
            self.dependencies[attribute.name] = attribute
class ArtifactTypeMetadata(object):
    """A container to store the meta-information about an artifact type"""

    def __init__(self, type_name, type_display_name, type_version,
                 type_description, endpoint):
        """Initializes the Artifact Type metadata

        :param type_name: name of the artifact type
        :param type_display_name: display name of the artifact type
        :param type_version: version of the artifact type
        :param type_description: description of the artifact type
        :param endpoint: REST API URI suffix to call the artifacts of this type
        """
        self.attributes = ArtifactAttributes()
        # These are going to be defined by third-party plugin
        # developers, so we need to do some validations on these values and
        # raise InvalidArtifactTypeDefinition if they are violated
        self.type_name = type_name
        self.type_display_name = type_display_name or type_name
        self.type_version = type_version or '1.0'
        self.type_description = type_description
        self.endpoint = endpoint or type_name.lower()
        self._validate_string(self.type_name, 'Type name', min_length=1,
                              max_length=255)
        self._validate_string(self.type_display_name, 'Type display name',
                              max_length=255)
        self._validate_string(self.type_description, 'Type description')
        self._validate_string(self.endpoint, 'endpoint', min_length=1)
        try:
            # partial=True accepts shortened versions such as "1.0"
            semantic_version.Version(self.type_version, partial=True)
        except ValueError:
            raise exc.InvalidArtifactTypeDefinition(
                message=_("Type version has to be a valid semver string"))

    @staticmethod
    def _validate_string(value, name, min_length=0, max_length=None,
                         pattern=None):
        """Validate a string for presence, type, length and pattern.

        :param value: string to check; None is allowed when min_length == 0
        :param name: attribute name interpolated into error messages
        :param min_length: minimum length (> 0 makes the value required)
        :param max_length: maximum length, or None for unlimited
        :param pattern: optional compiled regexp (``pattern.pattern`` is
            used in the error message) the value must match
        """
        if value is None:
            if min_length > 0:
                raise exc.InvalidArtifactTypeDefinition(
                    message=_("%(attribute)s is required"), attribute=name)
            else:
                return
        if not isinstance(value, six.string_types):
            raise exc.InvalidArtifactTypeDefinition(
                message=_("%(attribute)s have to be string"), attribute=name)
        if max_length and len(value) > max_length:
            raise exc.InvalidArtifactTypeDefinition(
                message=_("%(attribute)s may not be longer than %(length)i"),
                attribute=name, length=max_length)
        if min_length and len(value) < min_length:
            raise exc.InvalidArtifactTypeDefinition(
                message=_("%(attribute)s may not be shorter than %(length)i"),
                attribute=name, length=min_length)
        if pattern and not re.match(pattern, value):
            raise exc.InvalidArtifactTypeDefinition(
                message=_("%(attribute)s should match pattern %(pattern)s"),
                attribute=name, pattern=pattern.pattern)
def _build_declarative_meta(cls):
    # Collect attribute definitions from the class and its bases, build an
    # ArtifactTypeMetadata for them, and replace each definition with a
    # descriptor enforcing validation/mutability rules.
    attrs = dict(cls.__dict__)
    type_name = None
    type_display_name = None
    type_version = None
    type_description = None
    endpoint = None
    for base in cls.__mro__:
        for name, value in six.iteritems(vars(base)):
            # the first (most-derived, per MRO order) declaration wins
            if name == '__type_name__':
                if not type_name:
                    type_name = cls.__type_name__
            elif name == '__type_version__':
                if not type_version:
                    type_version = cls.__type_version__
            elif name == '__type_description__':
                if not type_description:
                    type_description = cls.__type_description__
            elif name == '__endpoint__':
                if not endpoint:
                    endpoint = cls.__endpoint__
            elif name == '__type_display_name__':
                if not type_display_name:
                    type_display_name = cls.__type_display_name__
            elif base is not cls and name not in attrs:
                # inherit attribute definitions declared on base classes
                # (raw definitions or already-built descriptors)
                if isinstance(value, AttributeDefinition):
                    attrs[name] = value
                elif isinstance(value, ArtifactPropertyDescriptor):
                    attrs[name] = value.prop
    meta = ArtifactTypeMetadata(type_name=type_name or cls.__name__,
                                type_display_name=type_display_name,
                                type_version=type_version,
                                type_description=type_description,
                                endpoint=endpoint)
    setattr(cls, 'metadata', meta)
    for k, v in attrs.items():
        if k == 'metadata':
            raise exc.InvalidArtifactTypePropertyDefinition(
                _("Cannot declare artifact property with reserved name "
                  "'metadata'"))
        if isinstance(v, AttributeDefinition):
            v._set_name(k)
            wrapper_class = None
            # collection-valued attributes get fresh validating wrapper
            # types so in-place mutations are checked too
            if isinstance(v, ListAttributeDefinition):
                wrapper_class = type("ValidatedList", (list,), {})
                _add_validation_to_list(wrapper_class)
            if isinstance(v, DictAttributeDefinition):
                wrapper_class = type("ValidatedDict", (dict,), {})
                _add_validation_to_dict(wrapper_class)
            prop_descr = ArtifactPropertyDescriptor(v, wrapper_class)
            setattr(cls, k, prop_descr)
            meta.attributes.add(v)
def _validating_method(method, klass):
def wrapper(self, *args, **kwargs):
instance_copy = klass(self)
method(instance_copy, *args, **kwargs)
self.property.validate(instance_copy)
method(self, *args, **kwargs)
return wrapper
def _immutable_method(method):
def substitution(*args, **kwargs):
raise exc.InvalidArtifactPropertyValue(
_("Unable to modify collection in "
"immutable or readonly property"))
return substitution
def _add_immutable_wrappers(class_to_add, wrapped_methods):
for method_name in wrapped_methods:
method = getattr(class_to_add, method_name, None)
if method:
setattr(class_to_add, method_name, _immutable_method(method))
def _add_validation_wrappers(class_to_validate, base_class, validated_methods):
    # Wrap every mutating method so changes are validated before being
    # applied (see _validating_method), and derive a companion readonly
    # class whose mutators always raise.
    for method_name in validated_methods:
        method = getattr(class_to_validate, method_name, None)
        if method:
            setattr(class_to_validate, method_name,
                    _validating_method(method, base_class))
    readonly_class = type("Readonly" + class_to_validate.__name__,
                          (class_to_validate,), {})
    _add_immutable_wrappers(readonly_class, validated_methods)

    # __make_immutable__ converts an instance into its readonly twin; the
    # readonly class is captured in this closure.
    def __make_immutable__(self):
        return readonly_class(self)

    class_to_validate.__make_immutable__ = __make_immutable__
def _add_validation_to_list(list_based_class):
    """Give every mutating list method copy-validate-apply semantics."""
    mutators = ['append', 'extend', 'insert', 'pop', 'remove',
                'reverse', 'sort', '__setitem__', '__delitem__',
                '__delslice__']
    _add_validation_wrappers(list_based_class, list, mutators)
def _add_validation_to_dict(dict_based_class):
    """Give every mutating dict method copy-validate-apply semantics."""
    mutators = ['pop', 'popitem', 'setdefault', 'update',
                '__delitem__', '__setitem__', 'clear']
    _add_validation_wrappers(dict_based_class, dict, mutators)
def _kwarg_init_constructor(self, **kwargs):
    # Suspend mutability checks so readonly/immutable properties can be
    # seeded during construction; the flag is removed again even on error.
    self.__suspend_mutability_checks__ = True
    try:
        for k in kwargs:
            # only declared properties (descriptors on the class) may be set
            if not hasattr(type(self), k):
                raise exc.ArtifactInvalidProperty(prop=k)
            setattr(self, k, kwargs[k])
        self._validate_required(self.metadata.attributes.properties)
    finally:
        del self.__suspend_mutability_checks__
def _validate_required(self, attribute_dict):
    """Raise if any required attribute in *attribute_dict* is unset/None."""
    for attr_name, definition in six.iteritems(attribute_dict):
        if definition.required and (not hasattr(self, attr_name) or
                                    getattr(self, attr_name) is None):
            raise exc.InvalidArtifactPropertyValue(name=attr_name, val=None,
                                                   msg=_('Value is required'))
def _update(self, values):
for k in values:
if hasattr(type(self), k):
setattr(self, k, values[k])
else:
raise exc.ArtifactInvalidProperty(prop=k)
def _pre_publish_validator(self, *args, **kwargs):
    # Blobs and dependencies may legitimately be absent while an artifact
    # is being assembled; they become mandatory at publish time.
    self._validate_required(self.metadata.attributes.blobs)
    self._validate_required(self.metadata.attributes.dependencies)
# Rebind the helper names so that, once injected into the declarative base
# (see get_declarative_base), they appear as regular methods of the class.
_kwarg_init_constructor.__name__ = '__init__'
_pre_publish_validator.__name__ = '__pre_publish__'
_update.__name__ = 'update'
def get_declarative_base(name='base', base_class=object):
    """Returns a base class which should be inherited to construct Artifact
    Type object using the declarative syntax of attribute definition
    """
    # base_class may be a single class or an already-built tuple of bases
    bases = not isinstance(base_class, tuple) and (base_class,) or base_class
    class_dict = {'__init__': _kwarg_init_constructor,
                  '_validate_required': _validate_required,
                  '__pre_publish__': _pre_publish_validator,
                  # marker checked by ArtifactTypeMetaclass: the base class
                  # itself is not a concrete artifact type
                  '_declarative_artifact_type': True,
                  'update': _update}
    return ArtifactTypeMetaclass(name, bases, class_dict)

View File

@ -1,572 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import numbers
import re
import types
import semantic_version
import six
from daisy.common.artifacts import declarative
import daisy.common.exception as exc
from daisy import i18n
_ = i18n._
class Text(declarative.PropertyDefinition):
    """An arbitrary-length text metadata property.

    Stored in a TEXT database column; such columns support neither
    sorting nor filtering.
    """
    ALLOWED_TYPES = (six.string_types,)
    DB_TYPE = 'text'
# noinspection PyAttributeOutsideInit
class String(Text):
    """A string metadata property of limited length

    Maps to VARCHAR columns in database, supports filtering and sorting.
    May have constrains on length and regexp patterns.
    The maximum length is limited to 255 characters
    """
    DB_TYPE = 'string'

    def __init__(self, max_length=255, min_length=0, pattern=None, **kwargs):
        """Defines a String metadata property.

        :param max_length: maximum value length
        :param min_length: minimum value length
        :param pattern: regexp pattern to match
        """
        super(String, self).__init__(**kwargs)
        self.max_length(max_length)
        self.min_length(min_length)
        if pattern:
            self.pattern(pattern)
        # if default and/or allowed_values are specified (in base classes)
        # then we need to validate them against the newly added validators
        self._check_definition()

    def max_length(self, value):
        """Sets the maximum value length"""
        # Constraints are exposed as setter methods so they can be
        # re-tuned after construction; each change re-checks the definition.
        self._max_length = value
        if value is not None:
            if value > 255:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _('Max string length may not exceed 255 characters'))
            self._add_validator('max_length',
                                lambda v: len(v) <= self._max_length,
                                _('Length is greater than maximum'))
        else:
            self._remove_validator('max_length')
        self._check_definition()

    def min_length(self, value):
        """Sets the minimum value length"""
        self._min_length = value
        if value is not None:
            if value < 0:
                raise exc.InvalidArtifactTypePropertyDefinition(
                    _('Min string length may not be negative'))
            self._add_validator('min_length',
                                lambda v: len(v) >= self._min_length,
                                _('Length is less than minimum'))
        else:
            self._remove_validator('min_length')
        self._check_definition()

    def pattern(self, value):
        """Sets the regexp pattern to match"""
        self._pattern = value
        if value is not None:
            self._add_validator('pattern',
                                lambda v: re.match(self._pattern,
                                                   v) is not None,
                                _('Does not match pattern'))
        else:
            self._remove_validator('pattern')
        self._check_definition()
class SemVerString(String):
    """A String metadata property matching semver pattern"""

    def __init__(self, **kwargs):
        def validate(value):
            # partial=True accepts shortened versions such as "1.0"
            try:
                semantic_version.Version(value, partial=True)
            except ValueError:
                return False
            return True

        # NOTE(review): the failure message is not wrapped in _() unlike
        # the other validator messages in this module -- confirm whether
        # it should be translatable.
        super(SemVerString,
              self).__init__(validators=[(validate,
                                          "Invalid semver string")],
                             **kwargs)
# noinspection PyAttributeOutsideInit
class Integer(declarative.PropertyDefinition):
    """An Integer metadata property

    Maps to INT columns in Database, supports filtering and sorting.
    May have constraints on value
    """
    ALLOWED_TYPES = (six.integer_types,)
    DB_TYPE = 'int'

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """Defines an Integer metadata property

        :param min_value: minimum allowed value
        :param max_value: maximum allowed value
        """
        super(Integer, self).__init__(**kwargs)
        if min_value is not None:
            self.min_value(min_value)
        if max_value is not None:
            self.max_value(max_value)
        # if default and/or allowed_values are specified (in base classes)
        # then we need to validate them against the newly added validators
        self._check_definition()

    def min_value(self, value):
        """Sets the minimum allowed value"""
        self._min_value = value
        if value is not None:
            self._add_validator('min_value',
                                lambda v: v >= self._min_value,
                                _('Value is less than minimum'))
        else:
            self._remove_validator('min_value')
        self._check_definition()

    def max_value(self, value):
        """Sets the maximum allowed value"""
        self._max_value = value
        if value is not None:
            self._add_validator('max_value',
                                lambda v: v <= self._max_value,
                                _('Value is greater than maximum'))
        else:
            self._remove_validator('max_value')
        self._check_definition()
# noinspection PyAttributeOutsideInit
class DateTime(declarative.PropertyDefinition):
    """A DateTime metadata property

    Maps to a DATETIME columns in database.
    Is not supported as Type Specific property, may be used only as Generic one

    May have constraints on value
    """
    ALLOWED_TYPES = (datetime.datetime,)
    DB_TYPE = 'datetime'

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """Defines a DateTime metadata property

        :param min_value: minimum allowed value
        :param max_value: maximum allowed value
        """
        super(DateTime, self).__init__(**kwargs)
        if min_value is not None:
            self.min_value(min_value)
        if max_value is not None:
            self.max_value(max_value)
        # if default and/or allowed_values are specified (in base classes)
        # then we need to validate them against the newly added validators
        self._check_definition()

    def min_value(self, value):
        """Sets the minimum allowed value"""
        self._min_value = value
        if value is not None:
            self._add_validator('min_value',
                                lambda v: v >= self._min_value,
                                _('Value is less than minimum'))
        else:
            self._remove_validator('min_value')
        self._check_definition()

    def max_value(self, value):
        """Sets the maximum allowed value"""
        self._max_value = value
        if value is not None:
            self._add_validator('max_value',
                                lambda v: v <= self._max_value,
                                _('Value is greater than maximum'))
        else:
            self._remove_validator('max_value')
        self._check_definition()
# noinspection PyAttributeOutsideInit
class Numeric(declarative.PropertyDefinition):
    """A Numeric metadata property

    Maps to floating point number columns in Database, supports filtering and
    sorting. May have constraints on value
    """
    # a single class (not a tuple) is a valid isinstance() argument
    ALLOWED_TYPES = numbers.Number
    DB_TYPE = 'numeric'

    def __init__(self, min_value=None, max_value=None, **kwargs):
        """Defines a Numeric metadata property

        :param min_value: minimum allowed value
        :param max_value: maximum allowed value
        """
        super(Numeric, self).__init__(**kwargs)
        if min_value is not None:
            self.min_value(min_value)
        if max_value is not None:
            self.max_value(max_value)
        # if default and/or allowed_values are specified (in base classes)
        # then we need to validate them against the newly added validators
        self._check_definition()

    def min_value(self, value):
        """Sets the minimum allowed value"""
        self._min_value = value
        if value is not None:
            self._add_validator('min_value',
                                lambda v: v >= self._min_value,
                                _('Value is less than minimum'))
        else:
            self._remove_validator('min_value')
        self._check_definition()

    def max_value(self, value):
        """Sets the maximum allowed value"""
        self._max_value = value
        if value is not None:
            self._add_validator('max_value',
                                lambda v: v <= self._max_value,
                                _('Value is greater than maximum'))
        else:
            self._remove_validator('max_value')
        self._check_definition()
class Boolean(declarative.PropertyDefinition):
    """A true/false metadata property.

    Persisted as a Boolean database column; values can be filtered and
    sorted on.
    """
    ALLOWED_TYPES = (types.BooleanType,)
    DB_TYPE = 'bool'
class Array(declarative.ListAttributeDefinition,
            declarative.PropertyDefinition, list):
    """An array metadata property

    May contain elements of any other PropertyDefinition types except Dict and
    Array. Each elements maps to appropriate type of columns in database.
    Preserves order. Allows filtering based on "Array contains Value" semantics

    May specify constrains on types of elements, their amount and uniqueness.
    """
    ALLOWED_ITEM_TYPES = (declarative.PropertyDefinition,)

    def __init__(self, item_type=String(), min_size=0, max_size=None,
                 unique=False, extra_items=True, **kwargs):
        """Defines an Array metadata property

        :param item_type: defines the types of elements in Array. If set to an
        instance of PropertyDefinition then all the elements have to be of that
        type. If set to list of such instances, then the elements on the
        corresponding positions have to be of the appropriate type.
        :param min_size: minimum size of the Array
        :param max_size: maximum size of the Array
        :param unique: if set to true, all the elements in the Array have to be
        unique
        """
        # NOTE(review): ``extra_items`` is accepted but never used in this
        # constructor -- confirm whether it is consumed elsewhere.
        # NOTE(review): the default item_type is one shared String()
        # instance across all Arrays; appears to be used read-only.
        if isinstance(item_type, Array):
            msg = _("Array property can't have item_type=Array")
            raise exc.InvalidArtifactTypePropertyDefinition(msg)
        declarative.ListAttributeDefinition.__init__(self,
                                                     item_type=item_type,
                                                     min_size=min_size,
                                                     max_size=max_size,
                                                     unique=unique)
        declarative.PropertyDefinition.__init__(self, **kwargs)
class Dict(declarative.DictAttributeDefinition,
           declarative.PropertyDefinition, dict):
    """A dictionary metadata property

    May contain elements of any other PropertyDefinition types except Dict.
    Each elements maps to appropriate type of columns in database. Allows
    filtering and sorting by values of each key except the ones mapping the
    Text fields.

    May specify constrains on types of elements and their amount.
    """
    ALLOWED_PROPERTY_TYPES = (declarative.PropertyDefinition,)

    def __init__(self, properties=String(), min_properties=0,
                 max_properties=None, **kwargs):
        """Defines a dictionary metadata property

        :param properties: defines the types of dictionary values. If set to an
        instance of PropertyDefinition then all the value have to be of that
        type. If set to a dictionary with string keys and values of
        PropertyDefinition type, then the elements mapped by the corresponding
        keys have to be of the appropriate type.
        :param min_properties: minimum allowed amount of properties in the dict
        :param max_properties: maximum allowed amount of properties in the dict
        """
        # NOTE(review): the default ``properties`` is one shared String()
        # instance; appears to be used read-only.
        declarative.DictAttributeDefinition. \
            __init__(self,
                     properties=properties,
                     min_properties=min_properties,
                     max_properties=max_properties)
        declarative.PropertyDefinition.__init__(self, **kwargs)
class ArtifactType(declarative.get_declarative_base()):  # noqa
    """A base class for all the Artifact Type definitions

    Defines the Generic metadata properties as attributes.
    """
    id = String(required=True, readonly=True)
    type_name = String(required=True, readonly=True)
    type_version = SemVerString(required=True, readonly=True)
    name = String(required=True, mutable=False)
    version = SemVerString(required=True, mutable=False)
    description = Text()
    tags = Array(unique=True, default=[])
    visibility = String(required=True,
                        allowed_values=["private", "public", "shared",
                                        "community"],
                        default="private")
    state = String(required=True, readonly=True, allowed_values=["creating",
                                                                 "active",
                                                                 "deactivated",
                                                                 "deleted"])
    owner = String(required=True, readonly=True)
    created_at = DateTime(required=True, readonly=True)
    updated_at = DateTime(required=True, readonly=True)
    published_at = DateTime(readonly=True)
    deleted_at = DateTime(readonly=True)

    def __init__(self, **kwargs):
        # type_name/type_version always come from the class metadata and
        # may not be supplied per instance.
        if "type_name" in kwargs:
            raise exc.InvalidArtifactPropertyValue(
                _("Unable to specify artifact type explicitly"))
        if "type_version" in kwargs:
            raise exc.InvalidArtifactPropertyValue(
                _("Unable to specify artifact type version explicitly"))
        super(ArtifactType,
              self).__init__(type_name=self.metadata.type_name,
                             type_version=self.metadata.type_version, **kwargs)

    def __eq__(self, other):
        # Artifact identity is its id; other fields are ignored.
        if not isinstance(other, ArtifactType):
            return False
        return self.id == other.id

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        return hash(self.id)

    def __is_mutable__(self):
        # Non-mutable properties may only change while the artifact is
        # still in the "creating" state.
        return self.state == "creating"
class ArtifactReference(declarative.RelationDefinition):
    """An artifact reference definition

    Allows to define constraints by the name and version of target artifact
    """
    ALLOWED_TYPES = ArtifactType

    def __init__(self, type_name=None, type_version=None, **kwargs):
        """Defines an artifact reference

        :param type_name: type name of the target artifact (a single name
            or a list of candidate names)
        :param type_version: type version of the target artifact; only
            valid together with a single type_name
        """
        super(ArtifactReference, self).__init__(**kwargs)
        if type_name is not None:
            if isinstance(type_name, types.ListType):
                # a version bound is ambiguous with multiple candidates
                type_names = list(type_name)
                if type_version is not None:
                    raise exc.InvalidArtifactTypePropertyDefinition(
                        _('Unable to specify version '
                          'if multiple types are possible'))
            else:
                type_names = [type_name]

            def validate_reference(artifact):
                if artifact.type_name not in type_names:
                    return False
                if (type_version is not None and
                        artifact.type_version != type_version):
                    return False
                return True

            self._add_validator('referenced_type',
                                validate_reference,
                                _("Invalid referenced type"))
        elif type_version is not None:
            # a version without a type is meaningless
            raise exc.InvalidArtifactTypePropertyDefinition(
                _('Unable to specify version '
                  'if type is not specified'))
        self._check_definition()
class ArtifactReferenceList(declarative.ListAttributeDefinition,
                            declarative.RelationDefinition, list):
    """A list of Artifact References

    Allows to define a collection of references to other artifacts, each
    optionally constrained by type name and type version
    """
    ALLOWED_ITEM_TYPES = (ArtifactReference,)

    def __init__(self, references=ArtifactReference(), min_size=0,
                 max_size=None, **kwargs):
        # Unlike Array, per-position (tuple-semantics) specifications are
        # rejected here.
        if isinstance(references, types.ListType):
            raise exc.InvalidArtifactTypePropertyDefinition(
                _("Invalid reference list specification"))
        declarative.RelationDefinition.__init__(self, **kwargs)
        # an empty list is a sensible default only when it is valid
        declarative.ListAttributeDefinition.__init__(self,
                                                     item_type=references,
                                                     min_size=min_size,
                                                     max_size=max_size,
                                                     unique=True,
                                                     default=[]
                                                     if min_size == 0 else
                                                     None)
class Blob(object):
    """A Binary object being part of the Artifact"""

    def __init__(self, size=0, locations=None, checksum=None, item_key=None):
        """Initializes a new Binary Object for an Artifact

        :param size: the size of Binary Data
        :param locations: a list of data locations in backing stores
        :param checksum: a checksum for the data
        :param item_key: key identifying this blob within its artifact
        """
        self.size = size
        self.checksum = checksum
        # each instance gets its own fresh list when none was supplied
        self.locations = [] if locations is None else locations
        self.item_key = item_key

    def to_dict(self):
        """Serialize the blob; only size and checksum are exposed."""
        return {
            "size": self.size,
            "checksum": self.checksum,
        }
class BinaryObject(declarative.BlobDefinition, Blob):
    """A definition of BinaryObject binding

    Adds a BinaryObject to an Artifact Type, optionally constrained by file
    size and amount of locations
    """
    ALLOWED_TYPES = (Blob,)

    def __init__(self,
                 max_file_size=None,
                 min_file_size=None,
                 min_locations=None,
                 max_locations=None,
                 **kwargs):
        """Defines a binary object as part of Artifact Type
        :param max_file_size: maximum size of the associate Blob
        :param min_file_size: minimum size of the associated Blob
        :param min_locations: minimum number of locations in the associated
        Blob
        :param max_locations: maximum number of locations in the associated
        Blob
        """
        super(BinaryObject, self).__init__(default=None, readonly=False,
                                           mutable=False, **kwargs)
        self._max_file_size = max_file_size
        self._min_file_size = min_file_size
        self._min_locations = min_locations
        self._max_locations = max_locations
        # a blob must always report its size ...
        self._add_validator('size_not_empty',
                            lambda v: v.size is not None,
                            _('Blob size is not set'))
        # ... while the remaining constraints are registered only when given
        if max_file_size:
            self._add_validator('max_size',
                                lambda v: v.size <= self._max_file_size,
                                _("File too large"))
        if min_file_size:
            self._add_validator('min_size',
                                lambda v: v.size >= self._min_file_size,
                                _("File too small"))
        if min_locations:
            self._add_validator('min_locations',
                                lambda v: len(
                                    v.locations) >= self._min_locations,
                                _("Too few locations"))
        if max_locations:
            self._add_validator(
                'max_locations',
                lambda v: len(v.locations) <= self._max_locations,
                _("Too many locations"))
class BinaryObjectList(declarative.ListAttributeDefinition,
                       declarative.BlobDefinition, list):
    """A definition of binding to the list of BinaryObject

    Adds a list of BinaryObject's to an artifact type, optionally constrained
    by the number of objects in the list and their uniqueness
    """
    ALLOWED_ITEM_TYPES = (BinaryObject,)

    def __init__(self, objects=BinaryObject(), min_count=0, max_count=None,
                 **kwargs):
        """Defines a list of binary objects

        :param objects: a BinaryObject prototype describing the list items
        :param min_count: minimum number of blobs in the list
        :param max_count: maximum number of blobs in the list
        """
        declarative.BlobDefinition.__init__(self, **kwargs)
        declarative.ListAttributeDefinition.__init__(self,
                                                     item_type=objects,
                                                     min_size=min_count,
                                                     max_size=max_count,
                                                     unique=True)
        # An empty list is a valid default only when no minimum is required.
        self.default = [] if min_count == 0 else None

View File

@ -1,196 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from oslo_config import cfg
import semantic_version
from stevedore import enabled
from daisy.common.artifacts import definitions
from daisy.common import exception
from daisy import i18n
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LW = i18n._LW
_LI = i18n._LI
# Configuration options controlling which artifact plugins may be loaded
# by ArtifactsPluginLoader (see _gen_check_func below).
plugins_opts = [
    cfg.BoolOpt('load_enabled', default=True,
                help=_('When false, no artifacts can be loaded regardless of'
                       ' available_plugins. When true, artifacts can be'
                       ' loaded.')),
    cfg.ListOpt('available_plugins', default=[],
                help=_('A list of artifacts that are allowed in the'
                       ' format name or name-version. Empty list means that'
                       ' any artifact can be loaded.'))
]
CONF = cfg.CONF
# Register the options so CONF.load_enabled / CONF.available_plugins resolve.
CONF.register_opts(plugins_opts)
class ArtifactsPluginLoader(object):
    """Discovers and indexes artifact type plugins via stevedore.

    Loaded plugin classes are kept in two indexes ('by_typename' and
    'by_endpoint'), each mapping a name to a list of plugin classes sorted
    by type_version in descending order, so the newest version is found
    first by next().
    """

    def __init__(self, namespace):
        """Load every enabled plugin registered under 'namespace'."""
        self.mgr = enabled.EnabledExtensionManager(
            check_func=self._gen_check_func(),
            namespace=namespace,
            propagate_map_exceptions=True,
            on_load_failure_callback=self._on_load_failure)
        self.plugin_map = {'by_typename': {},
                           'by_endpoint': {}}

        # Renamed from '_add_extention' (typo); purely a local name.
        def _add_extension(ext):
            """
            Plugins can be loaded as entry_point=single plugin and
            entry_point=PLUGIN_LIST, where PLUGIN_LIST is a python variable
            holding a list of plugins
            """
            def _load_one(plugin):
                if issubclass(plugin, definitions.ArtifactType):
                    # make sure that have correct plugin name
                    art_name = plugin.metadata.type_name
                    if art_name != ext.name:
                        raise exception.ArtifactNonMatchingTypeName(
                            name=art_name, plugin=ext.name)
                # make sure that no plugin with the same name and version
                # already exists
                exists = self._get_plugins(ext.name)
                new_tv = plugin.metadata.type_version
                if any(e.metadata.type_version == new_tv for e in exists):
                    raise exception.ArtifactDuplicateNameTypeVersion()
                self._add_plugin("by_endpoint", plugin.metadata.endpoint,
                                 plugin)
                self._add_plugin("by_typename", plugin.metadata.type_name,
                                 plugin)

            if isinstance(ext.plugin, list):
                for p in ext.plugin:
                    _load_one(p)
            else:
                _load_one(ext.plugin)

        # (ivasilevskaya) that looks pretty bad as RuntimeError is too general,
        # but stevedore has awful exception wrapping with no specific class
        # for this very case (no extensions for given namespace found)
        try:
            self.mgr.map(_add_extension)
        except RuntimeError as e:
            # NOTE: format the exception object itself instead of the
            # py2-only '.message' attribute, which does not exist on py3.
            LOG.error(_LE("Unable to load artifacts: %s") % e)

    def _version(self, artifact):
        """Return the plugin's type_version as a comparable Version."""
        return semantic_version.Version.coerce(artifact.metadata.type_version)

    def _add_plugin(self, spec, name, plugin):
        """
        Inserts a new plugin into a sorted by desc type_version list
        of existing plugins in order to retrieve the latest by next()
        """
        def _add(name, value):
            self.plugin_map[spec][name] = value

        old_order = copy.copy(self._get_plugins(name, spec=spec))
        for i, p in enumerate(old_order):
            if self._version(p) < self._version(plugin):
                _add(name, old_order[0:i] + [plugin] + old_order[i:])
                return
        _add(name, old_order + [plugin])

    def _get_plugins(self, name, spec="by_typename"):
        """Return the list of plugins registered under 'name' (may be [])."""
        if spec not in self.plugin_map.keys():
            return []
        return self.plugin_map[spec].get(name, [])

    def _gen_check_func(self):
        """generates check_func for EnabledExtensionManager"""
        def _all_forbidden(ext):
            # LOG.warning instead of the deprecated LOG.warn alias.
            LOG.warning(_LW("Can't load artifact %s: load disabled in "
                            "config") % ext.name)
            raise exception.ArtifactLoadError(name=ext.name)

        def _all_allowed(ext):
            LOG.info(
                _LI("Artifact %s has been successfully loaded") % ext.name)
            return True

        if not CONF.load_enabled:
            return _all_forbidden
        if len(CONF.available_plugins) == 0:
            return _all_allowed

        # Parse "name" / "name-version" entries into (name, version) pairs,
        # where version is None when unconstrained.
        available = []
        for name in CONF.available_plugins:
            type_name, version = (name.split('-', 1)
                                  if '-' in name else (name, None))
            available.append((type_name, version))

        def _check_ext(ext):
            try:
                next(n for n, v in available
                     if n == ext.plugin.metadata.type_name and
                     (v is None or v == ext.plugin.metadata.type_version))
            except StopIteration:
                LOG.warning(_LW("Can't load artifact %s: not in"
                                " available_plugins list") % ext.name)
                raise exception.ArtifactLoadError(name=ext.name)
            LOG.info(
                _LI("Artifact %s has been successfully loaded") % ext.name)
            return True
        return _check_ext

    # this has to be done explicitly as stevedore is pretty ignorant when
    # face to face with an Exception and tries to swallow it and print sth
    # irrelevant instead of expected error message
    def _on_load_failure(self, manager, ep, exc):
        msg = (_LE("Could not load plugin from %(module)s: %(msg)s") %
               {"module": ep.module_name, "msg": exc})
        LOG.error(msg)
        raise exc

    def _find_class_in_collection(self, collection, name, version=None):
        """Return the first plugin in 'collection' matching 'version'.

        Falls back to the first (i.e. newest, given the sort order kept by
        _add_plugin) plugin when no version is requested; raises
        ArtifactPluginNotFound when nothing matches.
        """
        try:
            def _cmp_version(plugin, version):
                ver = semantic_version.Version.coerce
                return (ver(plugin.metadata.type_version) ==
                        ver(version))
            if version:
                return next((p for p in collection
                             if _cmp_version(p, version)))
            return next((p for p in collection))
        except StopIteration:
            raise exception.ArtifactPluginNotFound(
                name="%s %s" % (name, "v %s" % version if version else ""))

    def get_class_by_endpoint(self, name, version=None):
        """Look up an artifact type class by its REST endpoint name."""
        if version is None:
            classlist = self._get_plugins(name, spec="by_endpoint")
            if not classlist:
                raise exception.ArtifactPluginNotFound(name=name)
            return self._find_class_in_collection(classlist, name)
        return self._find_class_in_collection(
            self._get_plugins(name, spec="by_endpoint"), name, version)

    def get_class_by_typename(self, name, version=None):
        """Look up an artifact type class by its type name."""
        return self._find_class_in_collection(
            self._get_plugins(name, spec="by_typename"), name, version)

View File

@ -1,264 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from daisy.common.artifacts import declarative
from daisy.common.artifacts import definitions
from daisy.common import exception
from daisy import i18n
_ = i18n._
# Properties shared by every artifact regardless of its type.  During
# serialization (serialize_for_db) these go into top-level result keys
# rather than the type-specific 'properties' mapping, and
# deserialize_from_db pops them back out first.
COMMON_ARTIFACT_PROPERTIES = ['id',
                              'type_name',
                              'type_version',
                              'name',
                              'version',
                              'description',
                              'visibility',
                              'state',
                              'tags',
                              'owner',
                              'created_at',
                              'updated_at',
                              'published_at',
                              'deleted_at']
def _serialize_list_prop(prop, values):
"""
A helper func called to correctly serialize an Array property.
Returns a dict {'type': some_supported_db_type, 'value': serialized_data}
"""
# FIXME(Due to a potential bug in declarative framework, for Arrays, that
# are values to some dict items (Dict(properties={"foo": Array()})),
# prop.get_value(artifact) returns not the real list of items, but the
# whole dict). So we can't rely on prop.get_value(artifact) and will pass
# correctly retrieved values to this function
serialized_value = []
for i, val in enumerate(values or []):
db_type = prop.get_item_definition_at_index(i).DB_TYPE
if db_type is None:
continue
serialized_value.append({
'type': db_type,
'value': val
})
return serialized_value
def _serialize_dict_prop(artifact, prop, key, value, save_prop_func):
    """Serialize a single key of a Dict property through 'save_prop_func'.

    The key is flattened into a '<prop_name>.<key>' entry.  Array-valued
    keys are serialized item-by-item with type 'array'; other keys are
    stored with the key definition's DB_TYPE.  Keys with no DB
    representation are silently ignored.
    """
    flattened_key = prop.name + '.' + key
    key_definition = prop.get_prop_definition_at_key(key)
    key_db_type = key_definition.DB_TYPE
    if isinstance(key_definition, declarative.ListAttributeDefinition):
        # FIXME(see comment for _serialize_list_prop func)
        raw_items = (key_definition.get_value(artifact) or {}).get(key, [])
        serialized = _serialize_list_prop(key_definition, values=raw_items)
        save_prop_func(flattened_key, 'array', serialized)
    elif key_db_type is not None:
        save_prop_func(flattened_key, key_db_type, value)
    # else: nothing to do here, don't know how to deal with this type
def _serialize_dependencies(artifact):
    """Returns a dict of serialized dependencies for given artifact

    Each relation is stored as a (possibly empty) list of the ids of the
    artifacts it refers to, keyed by the relation name.
    """
    dependencies = {}
    for relation in artifact.metadata.attributes.dependencies.values():
        serialized_dependency = []
        if isinstance(relation, declarative.ListAttributeDefinition):
            for dep in relation.get_value(artifact):
                serialized_dependency.append(dep.id)
        else:
            relation_data = relation.get_value(artifact)
            if relation_data:
                # Reuse the value fetched above instead of calling
                # get_value() a second time.
                serialized_dependency.append(relation_data.id)
        dependencies[relation.name] = serialized_dependency
    return dependencies
def _serialize_blobs(artifact):
    """Return a dict of serialized blobs for given artifact"""
    def _blob_to_dict(b):
        # Single serialization shape shared by scalar and list-typed blobs
        # (previously duplicated inline in both branches).
        return {
            'size': b.size,
            'locations': b.locations,
            'checksum': b.checksum,
            'item_key': b.item_key
        }

    blobs = {}
    for blob in artifact.metadata.attributes.blobs.values():
        serialized_blob = []
        if isinstance(blob, declarative.ListAttributeDefinition):
            for b in blob.get_value(artifact) or []:
                serialized_blob.append(_blob_to_dict(b))
        else:
            b = blob.get_value(artifact)
            # if no value for blob has been set -> continue; note that,
            # unlike the list branch, the blob's key is then not added at
            # all (preserved behavior).
            if not b:
                continue
            serialized_blob.append(_blob_to_dict(b))
        blobs[blob.name] = serialized_blob
    return blobs
def serialize_for_db(artifact):
    """Convert an artifact instance into its DB representation.

    Common properties become top-level keys of the result; type-specific
    properties are flattened into result['properties'] as
    {'type': db_type, 'value': ...} records; dependencies and blobs are
    serialized into their own keys by the helpers above.
    """
    result = {}
    custom_properties = {}

    def _save_prop(prop_key, prop_type, value):
        # Collector callback shared with _serialize_dict_prop.
        custom_properties[prop_key] = {
            'type': prop_type,
            'value': value
        }

    for prop in artifact.metadata.attributes.properties.values():
        # Common properties are stored directly, not in 'properties'.
        if prop.name in COMMON_ARTIFACT_PROPERTIES:
            result[prop.name] = prop.get_value(artifact)
            continue
        if isinstance(prop, declarative.ListAttributeDefinition):
            serialized_value = _serialize_list_prop(prop,
                                                    prop.get_value(artifact))
            _save_prop(prop.name, 'array', serialized_value)
        elif isinstance(prop, declarative.DictAttributeDefinition):
            fields_to_set = prop.get_value(artifact) or {}
            # if some keys are not present (like in prop == {}), then have to
            # set their values to None.
            # XXX FIXME prop.properties may be a dict ({'foo': '', 'bar': ''})
            # or String\Integer\whatsoever, limiting the possible dict values.
            # In the latter case have no idea how to remove old values during
            # serialization process.
            if isinstance(prop.properties, dict):
                for key in [k for k in prop.properties
                            if k not in fields_to_set.keys()]:
                    _serialize_dict_prop(artifact, prop, key, None, _save_prop)
            # serialize values of properties present
            for key, value in six.iteritems(fields_to_set):
                _serialize_dict_prop(artifact, prop, key, value, _save_prop)
        elif prop.DB_TYPE is not None:
            _save_prop(prop.name, prop.DB_TYPE, prop.get_value(artifact))

    result['properties'] = custom_properties
    result['dependencies'] = _serialize_dependencies(artifact)
    result['blobs'] = _serialize_blobs(artifact)
    return result
def _deserialize_blobs(artifact_type, blobs_from_db, artifact_properties):
    """Retrieves blobs from database"""
    def _make_blob(record):
        return definitions.Blob(size=record['size'],
                                locations=record['locations'],
                                checksum=record['checksum'],
                                item_key=record['item_key'])

    for blob_name, blob_value in six.iteritems(blobs_from_db):
        if not blob_value:
            continue
        blob_definition = artifact_type.metadata.attributes.blobs.get(
            blob_name)
        if isinstance(blob_definition,
                      declarative.ListAttributeDefinition):
            # List-typed blob: rebuild every stored record.
            val = [_make_blob(record) for record in blob_value]
        elif len(blob_value) == 1:
            # Scalar blob: exactly one stored record is expected.
            val = _make_blob(blob_value[0])
        else:
            raise exception.InvalidArtifactPropertyValue(
                message=_('Blob %(name)s may not have multiple values'),
                name=blob_name)
        artifact_properties[blob_name] = val
def _deserialize_dependencies(artifact_type, deps_from_db,
                              artifact_properties, plugins):
    """Retrieves dependencies from database"""
    for dep_name, dep_value in six.iteritems(deps_from_db):
        if not dep_value:
            continue
        dep_definition = (
            artifact_type.metadata.attributes.dependencies.get(dep_name))
        if isinstance(dep_definition, declarative.ListAttributeDefinition):
            # List relation: deserialize each referenced artifact in turn.
            val = [deserialize_from_db(record, plugins)
                   for record in dep_value]
        elif len(dep_value) == 1:
            val = deserialize_from_db(dep_value[0], plugins)
        else:
            raise exception.InvalidArtifactPropertyValue(
                message=_('Relation %(name)s may not have multiple values'),
                name=dep_name)
        artifact_properties[dep_name] = val
def deserialize_from_db(db_dict, plugins):
    """Reconstruct an artifact instance from its DB representation.

    The inverse of serialize_for_db: pops common columns, resolves the
    plugin class via type_name/type_version, unflattens type-specific
    properties, then restores blobs and dependencies.
    """
    artifact_properties = {}
    type_name = None
    type_version = None

    # Common columns: type_name/type_version select the plugin class, the
    # rest become constructor kwargs.
    for prop_name in COMMON_ARTIFACT_PROPERTIES:
        prop_value = db_dict.pop(prop_name, None)
        if prop_name == 'type_name':
            type_name = prop_value
        elif prop_name == 'type_version':
            type_version = prop_value
        else:
            artifact_properties[prop_name] = prop_value

    try:
        artifact_type = plugins.get_class_by_typename(type_name, type_version)
    except exception.ArtifactPluginNotFound:
        raise exception.UnknownArtifactType(name=type_name,
                                            version=type_version)

    # Each entry is a {'type': ..., 'value': ...} record produced by
    # serialize_for_db; dict keys were flattened as '<name>.<key>'.
    type_specific_properties = db_dict.pop('properties', {})
    for prop_name, prop_value in six.iteritems(type_specific_properties):
        prop_type = prop_value.get('type')
        prop_value = prop_value.get('value')
        if prop_value is None:
            continue
        if '.' in prop_name:  # dict-based property
            name, key = prop_name.split('.', 1)
            artifact_properties.setdefault(name, {})
            if prop_type == 'array':
                artifact_properties[name][key] = [item.get('value') for item in
                                                  prop_value]
            else:
                artifact_properties[name][key] = prop_value
        elif prop_type == 'array':  # list-based property
            artifact_properties[prop_name] = [item.get('value') for item in
                                              prop_value]
        else:
            artifact_properties[prop_name] = prop_value

    blobs = db_dict.pop('blobs', {})
    _deserialize_blobs(artifact_type, blobs, artifact_properties)
    dependencies = db_dict.pop('dependencies', {})
    _deserialize_dependencies(artifact_type, dependencies,
                              artifact_properties, plugins)
    return artifact_type(**artifact_properties)

View File

@ -1,5 +0,0 @@
from v1 import artifact as art1
from v2 import artifact as art2

# Both versions are exported under a single entry point so the plugin
# loader can register every available MyArtifact version.
MY_ARTIFACT = [art1.MyArtifact, art2.MyArtifact]

View File

@ -1,29 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.common.artifacts import definitions
class BaseArtifact(definitions.ArtifactType):
    """Sample artifact type used by the artifact plugin examples/tests."""
    # Version of this sample type definition.
    __type_version__ = "1.0"
    # Simple scalar properties.
    prop1 = definitions.String()
    prop2 = definitions.Integer()
    # A list of integers, each constrained to the range [1, 10].
    int_list = definitions.Array(item_type=definitions.Integer(max_value=10,
                                                               min_value=1))
    # A single typed dependency plus an unconstrained reference list.
    depends_on = definitions.ArtifactReference(type_name='MyArtifact')
    references = definitions.ArtifactReferenceList()
    # Binary payloads: one image file and a list of screenshots.
    image_file = definitions.BinaryObject()
    screenshots = definitions.BinaryObjectList()

View File

@ -1,25 +0,0 @@
[metadata]
name = artifact
version = 0.0.1
description = A sample plugin for artifact loading
author = Inessa Vasilevskaya
author-email = ivasilevskaya@mirantis.com
classifier =
Development Status :: 3 - Alpha
License :: OSI Approved :: Apache Software License
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Intended Audience :: Developers
Environment :: Console
[global]
setup-hooks =
pbr.hooks.setup_hook
[entry_points]
daisy.artifacts.types =
MyArtifact = daisy.contrib.plugins.artifacts_sample:MY_ARTIFACT

View File

@ -1,20 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools

# All package metadata and options are taken from setup.cfg via pbr;
# only package discovery is specified here.
setuptools.setup(packages=setuptools.find_packages(),
                 setup_requires=['pbr'], pbr=True)

View File

@ -1,21 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.contrib.plugins.artifacts_sample import base
class MyArtifact(base.BaseArtifact):
    """Sample artifact plugin, v1 line."""
    # Patch-level bump over the 1.0 base definition; inherits everything.
    __type_version__ = "1.0.1"

View File

@ -1,23 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.common.artifacts import definitions
from daisy.contrib.plugins.artifacts_sample import base
class MyArtifact(base.BaseArtifact):
    """Sample artifact plugin, v2 line."""
    __type_version__ = "2.0"
    # Re-declared with an explicit type_name constraint on the reference.
    depends_on = definitions.ArtifactReference(type_name="MyArtifact")

View File

@ -1 +0,0 @@
python-glanceclient

View File

@ -1,25 +0,0 @@
[metadata]
name = image_artifact_plugin
version = 2.0
description = An artifact plugin for Imaging functionality
author = Alexander Tivelkov
author-email = ativelkov@mirantis.com
classifier =
Development Status :: 3 - Alpha
License :: OSI Approved :: Apache Software License
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Intended Audience :: Developers
Environment :: Console
[global]
setup-hooks =
pbr.hooks.setup_hook
[entry_points]
daisy.artifacts.types =
Image = daisy.contrib.plugins.image_artifact.version_selector:versions

View File

@ -1,20 +0,0 @@
# Copyright 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools

# All package metadata and options are taken from setup.cfg via pbr;
# only package discovery is specified here.
setuptools.setup(packages=setuptools.find_packages(),
                 setup_requires=['pbr'], pbr=True)

View File

@ -1,36 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.common.artifacts import definitions
class ImageAsAnArtifact(definitions.ArtifactType):
    """Artifact type exposing a VM image as an artifact (v1.0)."""
    __type_name__ = 'Image'
    __endpoint__ = 'images'

    # The image payload itself; must be provided.
    file = definitions.BinaryObject(required=True)
    # Disk/container formats are fixed at creation time (mutable=False).
    disk_format = definitions.String(allowed_values=['ami', 'ari', 'aki',
                                                     'vhd', 'vmdk', 'raw',
                                                     'qcow2', 'vdi', 'iso'],
                                     required=True,
                                     mutable=False)
    container_format = definitions.String(allowed_values=['ami', 'ari',
                                                          'aki', 'bare',
                                                          'ovf', 'ova'],
                                          required=True,
                                          mutable=False)
    # Minimum resources needed to use the image; 0 means "no requirement".
    min_disk = definitions.Integer(min_value=0, default=0)
    min_ram = definitions.Integer(min_value=0, default=0)
    virtual_size = definitions.Integer(min_value=0)

View File

@ -1,27 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.common.artifacts import definitions
import daisy.contrib.plugins.image_artifact.v1.image as v1
class ImageAsAnArtifact(v1.ImageAsAnArtifact):
    """Image artifact, version 1.1: adds icons and similar-image links."""
    __type_version__ = '1.1'

    icons = definitions.BinaryObjectList()
    # References to other 'Image' artifacts considered similar to this one.
    similar_images = (definitions.
                      ArtifactReferenceList(references=definitions.
                                            ArtifactReference('Image')))

View File

@ -1,75 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from daisy.common.artifacts import definitions
from daisy.common import exception
import daisy.contrib.plugins.image_artifact.v1_1.image as v1_1
import daisyclient
from daisy import i18n
_ = i18n._
class ImageAsAnArtifact(v1_1.ImageAsAnArtifact):
    """Image artifact, version 2.0.

    Makes the binary payload optional: an artifact may instead reference an
    existing (legacy) image by id, in which case the blob metadata is
    copied from that image at publish time.
    """
    __type_version__ = '2.0'

    file = definitions.BinaryObject(required=False)
    # UUID4 pattern; exactly one of 'file' / 'legacy_image_id' must be set
    # (enforced in __pre_publish__ below).
    legacy_image_id = definitions.String(required=False, mutable=False,
                                         pattern=R'[0-9a-f]{8}-[0-9a-f]{4}'
                                                 R'-4[0-9a-f]{3}-[89ab]'
                                                 R'[0-9a-f]{3}-[0-9a-f]{12}')

    def __pre_publish__(self, context, *args, **kwargs):
        """Validate file/legacy_image_id and resolve the legacy image.

        :param context: request context; must carry service_catalog and
            auth_token
        :raises exception.InvalidArtifactPropertyValue: when neither or both
            of file/legacy_image_id are set, or the legacy image cannot be
            retrieved
        """
        super(ImageAsAnArtifact, self).__pre_publish__(*args, **kwargs)
        if self.file is None and self.legacy_image_id is None:
            raise exception.InvalidArtifactPropertyValue(
                message=_("Either a file or a legacy_image_id has to be "
                          "specified")
            )
        if self.file is not None and self.legacy_image_id is not None:
            raise exception.InvalidArtifactPropertyValue(
                message=_("Both file and legacy_image_id may not be "
                          "specified at the same time"))
        if self.legacy_image_id:
            # NOTE(review): the catalog lookup is by service *name* 'glance'
            # while the client used below is daisyclient -- confirm the
            # deployed catalog actually registers the service as 'glance'.
            glance_endpoint = next(service['endpoints'][0]['publicURL']
                                   for service in context.service_catalog
                                   if service['name'] == 'glance')
            try:
                client = daisyclient.Client(version=2,
                                            endpoint=glance_endpoint,
                                            token=context.auth_token)
                legacy_image = client.images.get(self.legacy_image_id)
            except Exception:
                # Any client failure is surfaced as an invalid property.
                raise exception.InvalidArtifactPropertyValue(
                    message=_('Unable to get legacy image')
                )
            if legacy_image is not None:
                # Mirror the legacy image's data as this artifact's blob.
                self.file = definitions.Blob(size=legacy_image.size,
                                             locations=[
                                                 {
                                                     "status": "active",
                                                     "value":
                                                     legacy_image.direct_url
                                                 }],
                                             checksum=legacy_image.checksum,
                                             item_key=legacy_image.id)
            else:
                raise exception.InvalidArtifactPropertyValue(
                    message=_("Legacy image was not found")
                )

View File

@ -1,19 +0,0 @@
# Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from v1 import image as v1
from v1_1 import image as v1_1
from v2 import image as v2

# Ordered list of every Image artifact version exposed via the entry point.
versions = [v1.ImageAsAnArtifact, v1_1.ImageAsAnArtifact, v2.ImageAsAnArtifact]

View File

@ -33,7 +33,6 @@ import functools
from oslo_log import log as logging
from daisy import artifacts
from daisy.registry.client.v2 import api
@ -293,317 +292,3 @@ def task_delete(client, task_id, session=None):
@_get_client
def task_update(client, task_id, values, session=None):
    # Thin proxy forwarding the update to the registry client; unlike most
    # wrappers in this module, 'session' is passed through here.
    return client.task_update(task_id=task_id, values=values, session=session)
# Metadef
# The functions below are thin proxies forwarding metadata-definition calls
# to the registry client injected by the @_get_client decorator.
# NOTE: they accept a 'session' argument only for signature compatibility
# with the direct-DB API; it is not forwarded.


@_get_client
def metadef_namespace_get_all(
        client, marker=None, limit=None, sort_key='created_at',
        sort_dir=None, filters=None, session=None):
    # Paginated, filterable listing of namespaces.
    return client.metadef_namespace_get_all(
        marker=marker, limit=limit,
        sort_key=sort_key, sort_dir=sort_dir, filters=filters)


@_get_client
def metadef_namespace_get(client, namespace_name, session=None):
    return client.metadef_namespace_get(namespace_name=namespace_name)


@_get_client
def metadef_namespace_create(client, values, session=None):
    return client.metadef_namespace_create(values=values)


@_get_client
def metadef_namespace_update(
        client, namespace_id, namespace_dict,
        session=None):
    return client.metadef_namespace_update(
        namespace_id=namespace_id, namespace_dict=namespace_dict)


@_get_client
def metadef_namespace_delete(client, namespace_name, session=None):
    return client.metadef_namespace_delete(
        namespace_name=namespace_name)


@_get_client
def metadef_object_get_all(client, namespace_name, session=None):
    return client.metadef_object_get_all(
        namespace_name=namespace_name)


@_get_client
def metadef_object_get(
        client,
        namespace_name, object_name, session=None):
    return client.metadef_object_get(
        namespace_name=namespace_name, object_name=object_name)


@_get_client
def metadef_object_create(
        client,
        namespace_name, object_dict, session=None):
    return client.metadef_object_create(
        namespace_name=namespace_name, object_dict=object_dict)


@_get_client
def metadef_object_update(
        client,
        namespace_name, object_id,
        object_dict, session=None):
    return client.metadef_object_update(
        namespace_name=namespace_name, object_id=object_id,
        object_dict=object_dict)


@_get_client
def metadef_object_delete(
        client,
        namespace_name, object_name,
        session=None):
    return client.metadef_object_delete(
        namespace_name=namespace_name, object_name=object_name)


@_get_client
def metadef_object_delete_namespace_content(
        client,
        namespace_name, session=None):
    # Bulk-removes every object belonging to the namespace.
    return client.metadef_object_delete_namespace_content(
        namespace_name=namespace_name)


@_get_client
def metadef_object_count(
        client,
        namespace_name, session=None):
    return client.metadef_object_count(
        namespace_name=namespace_name)
# Registry proxies for namespace properties and resource types; 'session'
# is accepted for signature compatibility only and is not forwarded.


@_get_client
def metadef_property_get_all(
        client,
        namespace_name, session=None):
    return client.metadef_property_get_all(
        namespace_name=namespace_name)


@_get_client
def metadef_property_get(
        client,
        namespace_name, property_name,
        session=None):
    return client.metadef_property_get(
        namespace_name=namespace_name, property_name=property_name)


@_get_client
def metadef_property_create(
        client,
        namespace_name, property_dict,
        session=None):
    return client.metadef_property_create(
        namespace_name=namespace_name, property_dict=property_dict)


@_get_client
def metadef_property_update(
        client,
        namespace_name, property_id,
        property_dict, session=None):
    return client.metadef_property_update(
        namespace_name=namespace_name, property_id=property_id,
        property_dict=property_dict)


@_get_client
def metadef_property_delete(
        client,
        namespace_name, property_name,
        session=None):
    return client.metadef_property_delete(
        namespace_name=namespace_name, property_name=property_name)


@_get_client
def metadef_property_delete_namespace_content(
        client,
        namespace_name, session=None):
    # Bulk-removes every property belonging to the namespace.
    return client.metadef_property_delete_namespace_content(
        namespace_name=namespace_name)


@_get_client
def metadef_property_count(
        client,
        namespace_name, session=None):
    return client.metadef_property_count(
        namespace_name=namespace_name)


@_get_client
def metadef_resource_type_create(client, values, session=None):
    return client.metadef_resource_type_create(values=values)


@_get_client
def metadef_resource_type_get(
        client,
        resource_type_name, session=None):
    return client.metadef_resource_type_get(
        resource_type_name=resource_type_name)


@_get_client
def metadef_resource_type_get_all(client, session=None):
    return client.metadef_resource_type_get_all()


@_get_client
def metadef_resource_type_delete(
        client,
        resource_type_name, session=None):
    return client.metadef_resource_type_delete(
        resource_type_name=resource_type_name)


@_get_client
def metadef_resource_type_association_get(
        client,
        namespace_name, resource_type_name,
        session=None):
    return client.metadef_resource_type_association_get(
        namespace_name=namespace_name, resource_type_name=resource_type_name)


@_get_client
def metadef_resource_type_association_create(
        client,
        namespace_name, values, session=None):
    return client.metadef_resource_type_association_create(
        namespace_name=namespace_name, values=values)


@_get_client
def metadef_resource_type_association_delete(
        client,
        namespace_name, resource_type_name, session=None):
    return client.metadef_resource_type_association_delete(
        namespace_name=namespace_name, resource_type_name=resource_type_name)


@_get_client
def metadef_resource_type_association_get_all_by_namespace(
        client,
        namespace_name, session=None):
    return client.metadef_resource_type_association_get_all_by_namespace(
        namespace_name=namespace_name)
@_get_client
def metadef_tag_get_all(client, namespace_name, filters=None, marker=None,
                        limit=None, sort_key='created_at', sort_dir=None,
                        session=None):
    """List tags in a namespace with filtering, paging and sorting."""
    # NOTE: unlike its sibling wrappers, this call also forwards *session*.
    return client.metadef_tag_get_all(namespace_name=namespace_name,
                                      filters=filters,
                                      marker=marker,
                                      limit=limit,
                                      sort_key=sort_key,
                                      sort_dir=sort_dir,
                                      session=session)


@_get_client
def metadef_tag_get(client, namespace_name, name, session=None):
    """Fetch a single tag by name from a namespace."""
    return client.metadef_tag_get(name=name,
                                  namespace_name=namespace_name)


@_get_client
def metadef_tag_create(client, namespace_name, tag_dict, session=None):
    """Create one tag inside a namespace."""
    return client.metadef_tag_create(namespace_name=namespace_name,
                                     tag_dict=tag_dict)


@_get_client
def metadef_tag_create_tags(client, namespace_name, tag_list, session=None):
    """Create a batch of tags inside a namespace."""
    return client.metadef_tag_create_tags(namespace_name=namespace_name,
                                          tag_list=tag_list)


@_get_client
def metadef_tag_update(client, namespace_name, id, tag_dict, session=None):
    """Update an existing tag identified by *id*."""
    return client.metadef_tag_update(namespace_name=namespace_name,
                                     id=id,
                                     tag_dict=tag_dict)


@_get_client
def metadef_tag_delete(client, namespace_name, name, session=None):
    """Remove a single tag from a namespace."""
    return client.metadef_tag_delete(name=name,
                                     namespace_name=namespace_name)


@_get_client
def metadef_tag_delete_namespace_content(client, namespace_name,
                                         session=None):
    """Remove every tag owned by a namespace."""
    return client.metadef_tag_delete_namespace_content(
        namespace_name=namespace_name)


@_get_client
def metadef_tag_count(client, namespace_name, session=None):
    """Return the number of tags in a namespace."""
    return client.metadef_tag_count(namespace_name=namespace_name)
@_get_client
def artifact_create(client, values, type_name, type_version=None,
                    session=None):
    """Create an artifact of the given type via the registry client."""
    return client.artifact_create(values=values,
                                  type_name=type_name,
                                  type_version=type_version)


@_get_client
def artifact_update(client, values, artifact_id, type_name,
                    type_version=None, session=None):
    """Update an existing artifact via the registry client."""
    return client.artifact_update(values=values,
                                  artifact_id=artifact_id,
                                  type_name=type_name,
                                  type_version=type_version)


@_get_client
def artifact_delete(client, artifact_id, type_name, type_version=None,
                    session=None):
    """Delete an artifact via the registry client."""
    return client.artifact_delete(artifact_id=artifact_id,
                                  type_name=type_name,
                                  type_version=type_version)


@_get_client
def artifact_get(client, artifact_id, type_name, type_version=None,
                 session=None):
    """Fetch a single artifact via the registry client."""
    return client.artifact_get(artifact_id=artifact_id,
                               type_name=type_name,
                               type_version=type_version)
@_get_client
def artifact_get_all(client, marker=None, limit=None, sort_key=None,
                     sort_dir=None, filters=None,
                     show_level=artifacts.Showlevel.NONE, session=None):
    """List artifacts through the registry client.

    :param marker: artifact id after which to start the page
    :param limit: maximum number of artifacts to return
    :param sort_key: attribute to sort by
    :param sort_dir: 'asc' or 'desc'
    :param filters: optional dict of filter conditions
    :param show_level: how much detail to serialize per artifact
    """
    # BUG FIX: the original delegated to ``client.artifact_create`` here,
    # so listing artifacts actually attempted a create; route to the
    # matching client method instead.  ``filters`` also used a shared
    # mutable dict as its default; default to None and substitute a fresh
    # dict at call time (backward compatible for all callers).
    return client.artifact_get_all(marker, limit, sort_key,
                                   sort_dir, filters or {}, show_level)
@_get_client
def artifact_publish(client, artifact_id, type_name, type_version=None,
                     session=None):
    """Publish (activate) an artifact via the registry client."""
    return client.artifact_publish(artifact_id=artifact_id,
                                   type_name=type_name,
                                   type_version=type_version)

View File

@ -42,18 +42,8 @@ import types
import socket
import netaddr
from daisy import artifacts as ga
from daisy.common import exception
from daisy.common import utils
from daisy.db.sqlalchemy import artifacts
from daisy.db.sqlalchemy.metadef_api import namespace as metadef_namespace_api
from daisy.db.sqlalchemy.metadef_api import object as metadef_object_api
from daisy.db.sqlalchemy.metadef_api import property as metadef_property_api
from daisy.db.sqlalchemy.metadef_api\
import resource_type as metadef_resource_type_api
from daisy.db.sqlalchemy.metadef_api\
import resource_type_association as metadef_association_api
from daisy.db.sqlalchemy.metadef_api import tag as metadef_tag_api
from daisy.db.sqlalchemy import models
from daisy import i18n
@ -4406,320 +4396,6 @@ def _task_format(task_ref, task_info_ref=None):
return task_dict
def metadef_namespace_get_all(context, marker=None, limit=None, sort_key=None,
                              sort_dir=None, filters=None, session=None):
    """Return every namespace visible to *context*."""
    return metadef_namespace_api.get_all(
        context, session or get_session(), marker, limit,
        sort_key, sort_dir, filters)


def metadef_namespace_get(context, namespace_name, session=None):
    """Fetch one namespace; raises if missing or not visible."""
    return metadef_namespace_api.get(
        context, namespace_name, session or get_session())


def metadef_namespace_create(context, values, session=None):
    """Create a namespace; raises if one with that name already exists."""
    return metadef_namespace_api.create(
        context, values, session or get_session())


def metadef_namespace_update(context, namespace_id, namespace_dict,
                             session=None):
    """Update a namespace; raises if missing or not visible."""
    return metadef_namespace_api.update(
        context, namespace_id, namespace_dict, session or get_session())


def metadef_namespace_delete(context, namespace_name, session=None):
    """Delete a namespace together with all foreign references."""
    return metadef_namespace_api.delete_cascade(
        context, namespace_name, session or get_session())
def metadef_object_get_all(context, namespace_name, session=None):
    """List every metadata-schema object in a namespace."""
    return metadef_object_api.get_all(
        context, namespace_name, session or get_session())


def metadef_object_get(context, namespace_name, object_name, session=None):
    """Fetch one metadata-schema object; raises if it does not exist."""
    return metadef_object_api.get(
        context, namespace_name, object_name, session or get_session())


def metadef_object_create(context, namespace_name, object_dict,
                          session=None):
    """Create a metadata-schema object; raises if it already exists."""
    return metadef_object_api.create(
        context, namespace_name, object_dict, session or get_session())


def metadef_object_update(context, namespace_name, object_id, object_dict,
                          session=None):
    """Update an object; raises if missing or not visible."""
    return metadef_object_api.update(
        context, namespace_name, object_id, object_dict,
        session or get_session())


def metadef_object_delete(context, namespace_name, object_name,
                          session=None):
    """Delete an object; raises if the namespace or object is missing."""
    return metadef_object_api.delete(
        context, namespace_name, object_name, session or get_session())


def metadef_object_delete_namespace_content(
        context, namespace_name, session=None):
    """Delete all objects belonging to a namespace."""
    return metadef_object_api.delete_by_namespace_name(
        context, namespace_name, session or get_session())


def metadef_object_count(context, namespace_name, session=None):
    """Count the objects in a namespace; raises if it does not exist."""
    return metadef_object_api.count(
        context, namespace_name, session or get_session())
def metadef_property_get_all(context, namespace_name, session=None):
    """List every property definition in a namespace."""
    return metadef_property_api.get_all(
        context, namespace_name, session or get_session())


def metadef_property_get(context, namespace_name,
                         property_name, session=None):
    """Fetch one property definition; raises if it does not exist."""
    return metadef_property_api.get(
        context, namespace_name, property_name, session or get_session())


def metadef_property_create(context, namespace_name, property_dict,
                            session=None):
    """Create a property definition; raises if it already exists."""
    return metadef_property_api.create(
        context, namespace_name, property_dict, session or get_session())


def metadef_property_update(context, namespace_name, property_id,
                            property_dict, session=None):
    """Update a property definition; raises if missing or not visible."""
    return metadef_property_api.update(
        context, namespace_name, property_id, property_dict,
        session or get_session())


def metadef_property_delete(context, namespace_name, property_name,
                            session=None):
    """Delete a property; raises if it or its namespace is missing."""
    return metadef_property_api.delete(
        context, namespace_name, property_name, session or get_session())


def metadef_property_delete_namespace_content(
        context, namespace_name, session=None):
    """Delete all properties belonging to a namespace."""
    return metadef_property_api.delete_by_namespace_name(
        context, namespace_name, session or get_session())


def metadef_property_count(context, namespace_name, session=None):
    """Count the properties in a namespace; raises if it is missing."""
    return metadef_property_api.count(
        context, namespace_name, session or get_session())
def metadef_resource_type_create(context, values, session=None):
    """Register a new resource type."""
    return metadef_resource_type_api.create(
        context, values, session or get_session())


def metadef_resource_type_get(context, resource_type_name, session=None):
    """Fetch a resource type by name."""
    return metadef_resource_type_api.get(
        context, resource_type_name, session or get_session())


def metadef_resource_type_get_all(context, session=None):
    """List every known resource type."""
    return metadef_resource_type_api.get_all(
        context, session or get_session())


def metadef_resource_type_delete(context, resource_type_name, session=None):
    """Remove a resource type by name."""
    return metadef_resource_type_api.delete(
        context, resource_type_name, session or get_session())
def metadef_resource_type_association_get(
        context, namespace_name, resource_type_name, session=None):
    """Fetch one namespace/resource-type association."""
    return metadef_association_api.get(
        context, namespace_name, resource_type_name,
        session or get_session())


def metadef_resource_type_association_create(
        context, namespace_name, values, session=None):
    """Associate a resource type with a namespace."""
    return metadef_association_api.create(
        context, namespace_name, values, session or get_session())


def metadef_resource_type_association_delete(
        context, namespace_name, resource_type_name, session=None):
    """Drop a namespace/resource-type association."""
    return metadef_association_api.delete(
        context, namespace_name, resource_type_name,
        session or get_session())


def metadef_resource_type_association_get_all_by_namespace(
        context, namespace_name, session=None):
    """List all resource-type associations of one namespace."""
    return metadef_association_api.get_all_by_namespace(
        context, namespace_name, session or get_session())
def metadef_tag_get_all(
        context, namespace_name, filters=None, marker=None, limit=None,
        sort_key=None, sort_dir=None, session=None):
    """List metadata-schema tags; raises if the namespace is missing."""
    return metadef_tag_api.get_all(
        context, namespace_name, session or get_session(),
        filters, marker, limit, sort_key, sort_dir)


def metadef_tag_get(context, namespace_name, name, session=None):
    """Fetch a single metadata-schema tag; raises if it does not exist."""
    return metadef_tag_api.get(
        context, namespace_name, name, session or get_session())


def metadef_tag_create(context, namespace_name, tag_dict,
                       session=None):
    """Create one metadata-schema tag; raises if it already exists."""
    return metadef_tag_api.create(
        context, namespace_name, tag_dict, session or get_session())
def metadef_tag_create_tags(context, namespace_name, tag_list,
                            session=None):
    """Create a batch of metadata-schema tags; raises on duplicates.

    :param tag_list: list of tag dicts to create in *namespace_name*
    """
    # CONSISTENCY FIX: the original called ``get_session()``
    # unconditionally, silently ignoring a caller-supplied *session*;
    # every sibling wrapper in this module honors it.
    session = session or get_session()
    return metadef_tag_api.create_tags(
        context, namespace_name, tag_list, session)
def metadef_tag_update(context, namespace_name, id, tag_dict,
                       session=None):
    """Update a tag; raises if missing or not visible."""
    return metadef_tag_api.update(
        context, namespace_name, id, tag_dict, session or get_session())


def metadef_tag_delete(context, namespace_name, name,
                       session=None):
    """Delete a tag; raises if the namespace or tag is missing."""
    return metadef_tag_api.delete(
        context, namespace_name, name, session or get_session())


def metadef_tag_delete_namespace_content(
        context, namespace_name, session=None):
    """Delete all tags belonging to a namespace."""
    return metadef_tag_api.delete_by_namespace_name(
        context, namespace_name, session or get_session())


def metadef_tag_count(context, namespace_name, session=None):
    """Count the tags in a namespace; raises if it does not exist."""
    return metadef_tag_api.count(
        context, namespace_name, session or get_session())
def artifact_create(context, values, type_name,
                    type_version=None, session=None):
    """Create an artifact row of the given type."""
    return artifacts.create(context, values, session or get_session(),
                            type_name, type_version)


def artifact_delete(context, artifact_id, type_name,
                    type_version=None, session=None):
    """Mark an artifact as deleted."""
    return artifacts.delete(context, artifact_id, session or get_session(),
                            type_name, type_version)


def artifact_update(context, values, artifact_id, type_name,
                    type_version=None, session=None):
    """Apply *values* to an existing artifact row."""
    return artifacts.update(context, values, artifact_id,
                            session or get_session(),
                            type_name, type_version)


def artifact_get(context, artifact_id,
                 type_name=None,
                 type_version=None,
                 show_level=ga.Showlevel.BASIC,
                 session=None):
    """Fetch one artifact serialized at *show_level*."""
    return artifacts.get(context, artifact_id, session or get_session(),
                         type_name, type_version, show_level)


def artifact_publish(context,
                     artifact_id,
                     type_name,
                     type_version=None,
                     session=None):
    """Publish an artifact, materializing its transitive dependencies."""
    return artifacts.publish(context, artifact_id, session or get_session(),
                             type_name, type_version)


def artifact_get_all(context, marker=None, limit=None, sort_keys=None,
                     sort_dirs=None, filters=None,
                     show_level=ga.Showlevel.NONE, session=None):
    """List all artifacts visible to *context*."""
    return artifacts.get_all(context, session or get_session(), marker,
                             limit, sort_keys, sort_dirs, filters,
                             show_level)
def _project_host_member_format(member_ref):
"""Format a member ref for consumption outside of this module."""
return {

View File

@ -1,756 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import operator
import uuid
from enum import Enum
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_utils import timeutils
import sqlalchemy
from sqlalchemy import and_
from sqlalchemy import or_
import sqlalchemy.orm as orm
from sqlalchemy.orm import joinedload
import daisy.artifacts as ga
from daisy.common import exception
from daisy.common import semver_db
from daisy.db.sqlalchemy import models_artifacts as models
from daisy import i18n
from oslo_log import log as os_logging
# Module-level logger plus i18n shortcuts for warning/error messages.
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW
_LE = i18n._LE
CONF = cfg.CONF
class Visibility(Enum):
    """Who may see an artifact: owner only, everyone, or shared targets."""
    PRIVATE = 'private'
    PUBLIC = 'public'
    SHARED = 'shared'
class State(Enum):
    """Lifecycle states of an artifact (see TRANSITIONS for legal moves)."""
    CREATING = 'creating'
    ACTIVE = 'active'
    DEACTIVATED = 'deactivated'
    DELETED = 'deleted'
# Legal lifecycle transitions: maps each State to the list of States it
# may move to.  DELETED is terminal (no outgoing transitions).
TRANSITIONS = {
    State.CREATING: [State.ACTIVE, State.DELETED],
    State.ACTIVE: [State.DEACTIVATED, State.DELETED],
    State.DEACTIVATED: [State.ACTIVE, State.DELETED],
    State.DELETED: []
}
def create(context, values, session, type_name, type_version=None):
    """Create a new artifact from *values* and return its dict form."""
    created = _create_or_update(context, values, None, session,
                                type_name, type_version)
    return _out(created)
def update(context, values, artifact_id, session,
           type_name, type_version=None):
    """Apply *values* to an existing artifact and return its dict form."""
    updated = _create_or_update(context, values, artifact_id, session,
                                type_name, type_version)
    return _out(updated)
def delete(context, artifact_id, session, type_name, type_version=None):
    """Soft-delete an artifact by moving it to the 'deleted' state."""
    marked = _create_or_update(context, {'state': 'deleted'}, artifact_id,
                               session, type_name, type_version)
    return _out(marked)
def _create_or_update(context, values, artifact_id, session, type_name,
                      type_version=None):
    """Insert a new artifact or update an existing one in a transaction.

    :param values: artifact attributes; 'tags', 'properties', 'blobs' and
        'dependencies' are popped out and persisted via their own helpers
    :param artifact_id: id of the artifact to update, or None to create
    :raises exception.ArtifactDuplicateNameTypeVersion: when saving hits
        a duplicate-entry constraint
    """
    # Work on a private copy: the code below pops keys out of *values*.
    values = copy.deepcopy(values)
    with session.begin():
        _set_version_fields(values)
        _validate_values(values)
        _drop_protected_attrs(models.Artifact, values)
        if artifact_id:
            # update existing artifact
            state = values.get('state')
            show_level = ga.Showlevel.BASIC
            if state is not None:
                # Stamp publish/delete timestamps as a side effect of the
                # requested state change.
                if state == 'active':
                    show_level = ga.Showlevel.DIRECT
                    values['published_at'] = timeutils.utcnow()
                if state == 'deleted':
                    values['deleted_at'] = timeutils.utcnow()
            artifact = _get(context, artifact_id, session, type_name,
                            type_version, show_level=show_level)
            _validate_transition(artifact.state,
                                 values.get('state') or artifact.state)
        else:
            # create new artifact
            artifact = models.Artifact()
            if 'id' not in values:
                artifact.id = str(uuid.uuid4())
            else:
                artifact.id = values['id']
        # Split compound attributes off into their dedicated tables.
        if 'tags' in values:
            tags = values.pop('tags')
            artifact.tags = _do_tags(artifact, tags)
        if 'properties' in values:
            properties = values.pop('properties', {})
            artifact.properties = _do_properties(artifact, properties)
        if 'blobs' in values:
            blobs = values.pop('blobs')
            artifact.blobs = _do_blobs(artifact, blobs)
        if 'dependencies' in values:
            dependencies = values.pop('dependencies')
            _do_dependencies(artifact, dependencies, session)
        # NOTE(review): compares against 'publish', not 'active' — looks
        # intentional for the publish flow, but confirm against callers.
        if values.get('state', None) == 'publish':
            artifact.dependencies.extend(
                _do_transitive_dependencies(artifact, session))
        artifact.update(values)
        try:
            artifact.save(session=session)
        except db_exc.DBDuplicateEntry:
            LOG.warn(_LW("Artifact with the specified type, name and version "
                         "already exists"))
            raise exception.ArtifactDuplicateNameTypeVersion()
    return artifact
def get(context, artifact_id, session, type_name=None, type_version=None,
        show_level=ga.Showlevel.BASIC):
    """Load one artifact and serialize it at *show_level*."""
    found = _get(context, artifact_id, session, type_name, type_version,
                 show_level)
    return _out(found, show_level)
def publish(context, artifact_id, session, type_name,
            type_version=None):
    """Activate an artifact, creating its transitive dependencies.

    Transitive dependencies are not materialized when an artifact is
    created, so this call computes and stores them for *artifact_id*.

    :returns: the artifact dict with Transitive show level
    """
    return _out(_create_or_update(context, {'state': 'active'},
                                  artifact_id, session,
                                  type_name, type_version))
def _validate_transition(source_state, target_state):
    """Raise unless moving from *source_state* to *target_state* is legal."""
    if target_state == source_state:
        return
    try:
        src = State(source_state)
        dst = State(target_state)
    except ValueError:
        # One of the values is not a recognized state name.
        raise exception.InvalidArtifactStateTransition(source=source_state,
                                                       target=target_state)
    if src not in TRANSITIONS or dst not in TRANSITIONS[src]:
        raise exception.InvalidArtifactStateTransition(source=src,
                                                       target=dst)
def _out(artifact, show_level=ga.Showlevel.BASIC, show_text_properties=True):
    """
    Transforms sqlalchemy object into dict depending on the show level.

    :param artifact: sqlalchemy Artifact model instance
    :param show_level: constant from Showlevel class
    :param show_text_properties: for performance optimization it's possible
        to disable loading of massive text properties
    :return: generated dict
    """
    res = artifact.to_dict(show_level=show_level,
                           show_text_properties=show_text_properties)
    if show_level >= ga.Showlevel.DIRECT:
        # Serialize dependencies as well; sort for deterministic output.
        dependencies = artifact.dependencies
        dependencies.sort(key=lambda elem: (elem.artifact_origin,
                                            elem.name, elem.position))
        res['dependencies'] = {}
        # Direct level serializes one hop shallowly; anything deeper
        # recurses with the transitive level.
        if show_level == ga.Showlevel.DIRECT:
            new_show_level = ga.Showlevel.BASIC
        else:
            new_show_level = ga.Showlevel.TRANSITIVE
        for dep in dependencies:
            if dep.artifact_origin == artifact.id:
                # make array
                for p in res['dependencies'].keys():
                    if p == dep.name:
                        # add value to array
                        res['dependencies'][p].append(
                            _out(dep.dest, new_show_level))
                        break
                else:
                    # create new array
                    deparr = []
                    deparr.append(_out(dep.dest, new_show_level))
                    res['dependencies'][dep.name] = deparr
    return res
def _get(context, artifact_id, session, type_name=None, type_version=None,
         show_level=ga.Showlevel.BASIC):
    """Load one artifact model, enforcing existence and visibility.

    :raises exception.ArtifactNotFound: when no row matches
    :raises exception.ArtifactForbidden: when *context* may not see it
    """
    values = dict(id=artifact_id)
    if type_name is not None:
        values['type_name'] = type_name
    if type_version is not None:
        values['type_version'] = type_version
    _set_version_fields(values)
    try:
        if show_level == ga.Showlevel.NONE:
            # Minimal load: only tags are eager-loaded.
            query = session.query(models.Artifact) \
                .options(joinedload(models.Artifact.tags)) \
                .filter_by(**values)
        else:
            # Full load: properties, tags, blobs and their locations.
            query = session.query(models.Artifact) \
                .options(joinedload(models.Artifact.properties)) \
                .options(joinedload(models.Artifact.tags)) \
                .options(joinedload(models.Artifact.blobs).
                         joinedload(models.ArtifactBlob.locations)) \
                .filter_by(**values)
        artifact = query.one()
    except orm.exc.NoResultFound:
        LOG.warn(_LW("Artifact with id=%s not found") % artifact_id)
        raise exception.ArtifactNotFound(id=artifact_id)
    if not _check_visibility(context, artifact):
        LOG.warn(_LW("Artifact with id=%s is not accessible") % artifact_id)
        raise exception.ArtifactForbidden(id=artifact_id)
    return artifact
def get_all(context, session, marker=None, limit=None,
            sort_keys=None, sort_dirs=None, filters=None,
            show_level=ga.Showlevel.NONE):
    """Return every artifact visible to *context*, serialized."""
    found = _get_all(context, session, filters or {}, marker,
                     limit, sort_keys, sort_dirs, show_level)
    # Text properties are skipped during serialization for performance.
    return map(lambda art: _out(art, show_level,
                                show_text_properties=False),
               found)
def _get_all(context, session, filters=None, marker=None,
             limit=None, sort_keys=None, sort_dirs=None,
             show_level=ga.Showlevel.NONE):
    """Get all artifacts that match zero or more filters.

    :param filters: dict of filter keys and values.
    :param marker: artifact id after which to start page
    :param limit: maximum number of artifacts to return
    :param sort_keys: artifact attributes by which results should be sorted
    :param sort_dirs: directions in which results should be sorted (asc, desc)
    """
    filters = filters or {}
    query = _do_artifacts_query(context, session, show_level)
    basic_conds, tag_conds, prop_conds = _do_query_filters(filters)
    # Each condition group is ANDed onto the query; tag/property groups
    # join their table with a fresh alias per group.
    if basic_conds:
        for basic_condition in basic_conds:
            query = query.filter(and_(*basic_condition))
    if tag_conds:
        for tag_condition in tag_conds:
            query = query.join(models.ArtifactTag, aliased=True).filter(
                and_(*tag_condition))
    if prop_conds:
        for prop_condition in prop_conds:
            query = query.join(models.ArtifactProperty, aliased=True).filter(
                and_(*prop_condition))
    marker_artifact = None
    if marker is not None:
        marker_artifact = _get(context, marker, session, None, None)
    # Always include created_at/id as tie-breaking sort keys so paging
    # is total-ordered.
    if sort_keys is None:
        sort_keys = [('created_at', None), ('id', None)]
        sort_dirs = ['desc', 'desc']
    else:
        for key in [('created_at', None), ('id', None)]:
            if key not in sort_keys:
                sort_keys.append(key)
                sort_dirs.append('desc')
    # Note(mfedosin): Kostyl to deal with situation that sqlalchemy cannot
    # work with composite keys correctly
    if ('version', None) in sort_keys:
        i = sort_keys.index(('version', None))
        version_sort_dir = sort_dirs[i]
        sort_keys[i:i + 1] = [('version_prefix', None),
                              ('version_suffix', None),
                              ('version_meta', None)]
        sort_dirs[i:i + 1] = [version_sort_dir] * 3
    query = _do_paginate_query(query=query,
                               limit=limit,
                               sort_keys=sort_keys,
                               marker=marker_artifact,
                               sort_dirs=sort_dirs)
    return query.all()
def _do_paginate_query(query, sort_keys=None, sort_dirs=None,
                       marker=None, limit=None):
    """Apply sorting and marker-based pagination to an artifact query.

    :param query: base SQLAlchemy query to refine
    :param sort_keys: list of (name, custom_type_or_None) tuples; a None
        second element means a generic column on Artifact, otherwise the
        value is the typed-value prefix of an ArtifactProperty column
    :param sort_dirs: per-key 'asc'/'desc' directions; defaults to 'asc'
    :param marker: artifact model instance to page after, or None
    :param limit: maximum number of rows, or None for no limit
    :raises ValueError: on an unknown sort direction
    """
    # Default the sort direction to ascending for every key.
    if sort_dirs is None:
        sort_dirs = ['asc' for _sort_key in sort_keys]
    assert(len(sort_dirs) == len(sort_keys))
    # Add sorting
    for current_sort_key, current_sort_dir in zip(sort_keys, sort_dirs):
        try:
            sort_dir_func = {
                'asc': sqlalchemy.asc,
                'desc': sqlalchemy.desc,
            }[current_sort_dir]
        except KeyError:
            raise ValueError(_LE("Unknown sort direction, "
                                 "must be 'desc' or 'asc'"))
        if current_sort_key[1] is None:
            # sort by generic property
            query = query.order_by(sort_dir_func(getattr(
                models.Artifact,
                current_sort_key[0])))
        else:
            # sort by custom property
            prop_type = current_sort_key[1] + "_value"
            query = query.join(models.ArtifactProperty).\
                filter(
                    models.ArtifactProperty.name == current_sort_key[0]).\
                order_by(
                    sort_dir_func(getattr(models.ArtifactProperty,
                                          prop_type)))
    # Add pagination: rows strictly "after" the marker in sort order.
    if marker is not None:
        marker_values = []
        for sort_key in sort_keys:
            v = getattr(marker, sort_key[0])
            marker_values.append(v)
        # Build up an array of sort criteria: the i-th criterion matches
        # the marker on keys 0..i-1 and is strictly beyond it on key i.
        criteria_list = []
        for i in range(len(sort_keys)):
            crit_attrs = []
            for j in range(i):
                if sort_keys[j][1] is None:
                    model_attr = getattr(models.Artifact, sort_keys[j][0])
                else:
                    model_attr = getattr(models.ArtifactProperty,
                                         sort_keys[j][1] + "_value")
                crit_attrs.append((model_attr == marker_values[j]))
            # BUG FIX: the original indexed with the inner loop variable
            # ``j`` here, which is stale (i - 1) after the loop and even
            # unbound when i == 0; the current key is sort_keys[i].
            if sort_keys[i][1] is None:
                model_attr = getattr(models.Artifact, sort_keys[i][0])
            else:
                model_attr = getattr(models.ArtifactProperty,
                                     sort_keys[i][1] + "_value")
            if sort_dirs[i] == 'desc':
                crit_attrs.append((model_attr < marker_values[i]))
            else:
                crit_attrs.append((model_attr > marker_values[i]))
            criteria = and_(*crit_attrs)
            criteria_list.append(criteria)
        f = or_(*criteria_list)
        query = query.filter(f)
    if limit is not None:
        query = query.limit(limit)
    return query
def _do_artifacts_query(context, session, show_level=ga.Showlevel.NONE):
    """Build the base artifact query for *context* at *show_level*."""
    LOG.debug("context.is_admin=%(is_admin)s; context.owner=%(owner)s" %
              {'is_admin': context.is_admin, 'owner': context.owner})
    # Only NONE and BASIC are valid here; reject anything else up front.
    if show_level not in (ga.Showlevel.NONE, ga.Showlevel.BASIC):
        msg = _LW("Show level %s is not supported in this "
                  "operation") % ga.Showlevel.to_str(show_level)
        LOG.warn(msg)
        raise exception.ArtifactUnsupportedShowLevel(shl=show_level)
    if show_level == ga.Showlevel.NONE:
        query = session.query(models.Artifact) \
            .options(joinedload(models.Artifact.tags))
    else:
        # BASIC: eager-load everything except massive text properties.
        query = session.query(models.Artifact) \
            .options(joinedload(models.Artifact.properties)
                     .defer(models.ArtifactProperty.text_value)) \
            .options(joinedload(models.Artifact.tags)) \
            .options(joinedload(models.Artifact.blobs).
                     joinedload(models.ArtifactBlob.locations))
    # Admins see everything.
    if context.is_admin:
        return query
    # Regular users see public artifacts, plus their own when owned.
    if context.owner is not None:
        return query.filter(
            or_(models.Artifact.owner == context.owner,
                models.Artifact.visibility == 'public'))
    return query.filter(models.Artifact.visibility == 'public')
# Maps API filter operator names to the Python comparison used when
# building SQLAlchemy conditions in _do_query_filters.
op_mappings = {
    'EQ': operator.eq,
    'GT': operator.gt,
    'GE': operator.ge,
    'LT': operator.lt,
    'LE': operator.le,
    'NE': operator.ne,
    'IN': operator.eq  # it must be eq
}
def _do_query_filters(filters):
    """Translate an API filter dict into SQLAlchemy condition lists.

    NOTE: entries are popped from *filters* in place, so the caller's
    dict is consumed.

    :param filters: maps filter name to a dict with at least 'value';
        generic per-property filters also carry 'operator', 'type' and
        optionally 'position'
    :returns: (basic_conds, tag_conds, prop_conds) — lists of condition
        groups to be ANDed onto the artifact query
    """
    basic_conds = []
    tag_conds = []
    prop_conds = []
    # don't show deleted artifacts
    basic_conds.append([models.Artifact.state != 'deleted'])
    visibility = filters.pop('visibility', None)
    if visibility is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.visibility == visibility['value']])
    type_name = filters.pop('type_name', None)
    if type_name is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.type_name == type_name['value']])
    type_version = filters.pop('type_version', None)
    if type_version is not None:
        # ignore operator. always consider it EQ
        # TODO(mfedosin) add support of LIKE operator
        type_version = semver_db.parse(type_version['value'])
        basic_conds.append([models.Artifact.type_version == type_version])
    name = filters.pop('name', None)
    if name is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.name == name['value']])
    version = filters.pop('version', None)
    if version is not None:
        # ignore operator. always consider it EQ
        # TODO(mfedosin) add support of LIKE operator
        version = semver_db.parse(version['value'])
        basic_conds.append([models.Artifact.version == version])
    state = filters.pop('state', None)
    if state is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.state == state['value']])
    owner = filters.pop('owner', None)
    if owner is not None:
        # ignore operator. always consider it EQ
        basic_conds.append([models.Artifact.owner == owner['value']])
    id_list = filters.pop('id_list', None)
    if id_list is not None:
        basic_conds.append([models.Artifact.id.in_(id_list['value'])])
    name_list = filters.pop('name_list', None)
    if name_list is not None:
        basic_conds.append([models.Artifact.name.in_(name_list['value'])])
    tags = filters.pop('tags', None)
    if tags is not None:
        # One condition group per tag so each gets its own aliased join.
        for tag in tags['value']:
            tag_conds.append([models.ArtifactTag.value == tag])
    # process remaining filters as generic typed-property conditions
    for filtername, filtervalue in filters.items():
        db_prop_op = filtervalue['operator']
        db_prop_value = filtervalue['value']
        db_prop_type = filtervalue['type'] + "_value"
        db_prop_position = filtervalue.get('position')
        conds = [models.ArtifactProperty.name == filtername]
        if db_prop_op in op_mappings:
            fn = op_mappings[db_prop_op]
            result = fn(getattr(models.ArtifactProperty, db_prop_type),
                        db_prop_value)
            cond = [result,
                    models.ArtifactProperty.position == db_prop_position]
            if db_prop_op == 'IN':
                # IN matches any positional value, so an explicit
                # position makes no sense with it.
                if db_prop_position is not None:
                    msg = _LE("Cannot use this parameter with "
                              "the operator IN")
                    LOG.error(msg)
                    raise exception.ArtifactInvalidPropertyParameter(op='IN')
                cond = [result,
                        models.ArtifactProperty.position >= 0]
        else:
            msg = _LE("Operator %s is not supported") % db_prop_op
            LOG.error(msg)
            raise exception.ArtifactUnsupportedPropertyOperator(op=db_prop_op)
        conds.extend(cond)
        prop_conds.append(conds)
    return basic_conds, tag_conds, prop_conds
def _do_tags(artifact, new_tags):
    """Merge *new_tags* into the artifact's tag rows, reusing existing ones.

    NOTE: consumes matched values out of *new_tags* in place.
    """
    result = []
    # Keep existing rows whose value is still wanted.
    for existing in artifact.tags:
        if existing.value in new_tags:
            result.append(existing)
            new_tags.remove(existing.value)
    # Whatever is left needs a brand-new row.
    for value in new_tags:
        row = models.ArtifactTag()
        row.value = value
        result.append(row)
    return result
def _do_property(propname, prop, position=None):
    """Build an ArtifactProperty row from a typed property dict."""
    row = models.ArtifactProperty()
    row.name = propname
    # The value lands in the column matching the declared type,
    # e.g. 'string' -> string_value.
    value_column = prop['type'] + "_value"
    setattr(row, value_column, prop['value'])
    row.position = position
    return row
def _do_properties(artifact, new_properties):
    """Merge *new_properties* into the artifact's property rows."""
    # Existing rows whose name is not being replaced are kept untouched.
    merged = [prop for prop in artifact.properties
              if prop.name not in new_properties]
    for name, prop in new_properties.items():
        if prop['type'] == 'array':
            # Array values become one positional row per element.
            merged.extend(_do_property(name, item, idx)
                          for idx, item in enumerate(prop['value']))
        else:
            merged.append(_do_property(name, prop))
    return merged
def _do_blobs(artifact, new_blobs):
    """Merge *new_blobs* (name -> list of blob dicts) into artifact.blobs.

    Existing rows whose name is absent from *new_blobs* are kept as-is;
    rows matching a (name, position) pair are updated in place; anything
    else becomes a new ArtifactBlob row.
    """
    blobs_to_update = []
    # don't touch existing blobs
    for blob in artifact.blobs:
        if blob.name not in new_blobs:
            blobs_to_update.append(blob)
    for blobname, blobs in new_blobs.items():
        for pos, blob in enumerate(blobs):
            for db_blob in artifact.blobs:
                if db_blob.name == blobname and db_blob.position == pos:
                    # update existing blobs
                    db_blob.size = blob['size']
                    db_blob.checksum = blob['checksum']
                    db_blob.item_key = blob['item_key']
                    db_blob.locations = _do_locations(db_blob,
                                                      blob['locations'])
                    blobs_to_update.append(db_blob)
                    break
            else:
                # create new blob
                db_blob = models.ArtifactBlob()
                db_blob.name = blobname
                db_blob.size = blob['size']
                db_blob.checksum = blob['checksum']
                db_blob.item_key = blob['item_key']
                db_blob.position = pos
                db_blob.locations = _do_locations(db_blob, blob['locations'])
                blobs_to_update.append(db_blob)
    return blobs_to_update
def _do_locations(blob, new_locations):
    """Merge *new_locations* into the blob's location rows.

    A location whose 'value' matches an existing row updates that row in
    place; otherwise a new ArtifactBlobLocation row is created.
    """
    merged = []
    for idx, loc in enumerate(new_locations):
        existing = next((row for row in blob.locations
                         if row.value == loc['value']), None)
        if existing is not None:
            # update existing location
            existing.position = idx
            existing.status = loc['status']
            merged.append(existing)
        else:
            # create new location
            row = models.ArtifactBlobLocation()
            row.value = loc['value']
            row.status = loc['status']
            row.position = idx
            merged.append(row)
    return merged
def _do_dependencies(artifact, new_dependencies, session):
    """Replace the artifact's direct dependencies with *new_dependencies*.

    Logs a warning (without aborting) when a dependency name is already
    present, deletes all former dependency rows from the session, and
    assigns freshly built direct ArtifactDependency rows to
    artifact.dependencies.  Returns None (mutates *artifact* in place).
    """
    deps_to_update = []
    # small check that all dependencies are new
    if artifact.dependencies is not None:
        for db_dep in artifact.dependencies:
            for dep in new_dependencies.keys():
                if db_dep.name == dep:
                    msg = _LW("Artifact with the specified type, name "
                              "and versions already has the direct "
                              "dependency=%s") % dep
                    LOG.warn(msg)
    # change values of former dependency
    for dep in artifact.dependencies:
        session.delete(dep)
    artifact.dependencies = []
    for depname, depvalues in new_dependencies.items():
        for pos, depvalue in enumerate(depvalues):
            db_dep = models.ArtifactDependency()
            db_dep.name = depname
            db_dep.artifact_source = artifact.id
            db_dep.artifact_dest = depvalue
            db_dep.artifact_origin = artifact.id
            db_dep.is_direct = True
            db_dep.position = pos
            deps_to_update.append(db_dep)
    artifact.dependencies = deps_to_update
def _do_transitive_dependencies(artifact, session):
    """Expand the artifact's direct dependencies one level.

    For every direct dependency, copy the dependency rows of the
    dependency's own target so they become transitive (is_direct=False)
    rows of *artifact*.

    :raises: ArtifactDuplicateTransitiveDependency if a transitive row
        already exists for a target.
    """
    deps_to_update = []
    for dependency in artifact.dependencies:
        depvalue = dependency.artifact_dest
        transitdeps = session.query(models.ArtifactDependency). \
            filter_by(artifact_source=depvalue).all()
        for transitdep in transitdeps:
            if not transitdep.is_direct:
                # transitive dependencies are already created
                # NOTE: ids are not guaranteed to be integers, so format
                # with %s (the previous %d would raise TypeError on a
                # string id)
                msg = _LW("Artifact with the specified type, "
                          "name and version already has the "
                          "direct dependency=%s") % transitdep.id
                LOG.warn(msg)
                raise exception.ArtifactDuplicateTransitiveDependency(
                    dep=transitdep.id)
            db_dep = models.ArtifactDependency()
            db_dep.name = transitdep['name']
            db_dep.artifact_source = artifact.id
            db_dep.artifact_dest = transitdep.artifact_dest
            db_dep.artifact_origin = transitdep.artifact_source
            db_dep.is_direct = False
            db_dep.position = transitdep.position
            deps_to_update.append(db_dep)
    return deps_to_update
def _check_visibility(context, artifact):
    """Tell whether *artifact* may be seen in *context*.

    Admins, ownerless artifacts and public artifacts are always
    visible; private artifacts only to their owner; shared artifacts
    are never visible through this check.
    """
    if context.is_admin or not artifact.owner:
        return True
    if artifact.visibility == Visibility.PUBLIC.value:
        return True
    if artifact.visibility == Visibility.PRIVATE.value:
        return bool(context.owner and context.owner == artifact.owner)
    # SHARED and any other visibility fall through as not visible
    return False
def _set_version_fields(values):
    """Replace raw version strings in *values* with parsed semver objects."""
    for key in ('type_version', 'version'):
        if key in values:
            values[key] = semver_db.parse(values[key])
def _validate_values(values):
    """Reject values whose 'state' or 'visibility' is not a known enum value."""
    for field, enum_cls in (('state', State), ('visibility', Visibility)):
        if field in values:
            try:
                enum_cls(values[field])
            except ValueError:
                msg = "Invalid artifact %s '%s'" % (field, values[field])
                raise exception.Invalid(msg)
    # TODO(mfedosin): it's an idea to validate tags someday
    # (check that all tags match the regexp)
def _drop_protected_attrs(model_class, values):
"""
Removed protected attributes from values dictionary using the models
__protected_attributes__ field.
"""
for attr in model_class.__protected_attributes__:
if attr in values:
del values[attr]

View File

@ -1,483 +0,0 @@
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2013 OpenStack Foundation
# Copyright 2013 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import os
from os.path import isfile
from os.path import join
import re
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import timeutils
import six
import sqlalchemy
from sqlalchemy import and_
from sqlalchemy.schema import MetaData
from sqlalchemy.sql import select
from daisy.common import utils
from daisy import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LE = i18n._LE
_LW = i18n._LW
_LI = i18n._LI
# Where metadata-definition JSON files are read from (load) and written
# to (export); registered on the global oslo.config object.
metadata_opts = [
    cfg.StrOpt('metadata_source_path', default='/etc/glance/metadefs/',
               help=_('Path to the directory where json metadata '
                      'files are stored'))
]
CONF = cfg.CONF
CONF.register_opts(metadata_opts)
def get_metadef_namespaces_table(meta):
    """Reflect and return the metadef_namespaces table."""
    return sqlalchemy.Table('metadef_namespaces', meta, autoload=True)


def get_metadef_resource_types_table(meta):
    """Reflect and return the metadef_resource_types table."""
    return sqlalchemy.Table('metadef_resource_types', meta, autoload=True)


def get_metadef_namespace_resource_types_table(meta):
    """Reflect and return the namespace<->resource-type association table."""
    return sqlalchemy.Table('metadef_namespace_resource_types', meta,
                            autoload=True)


def get_metadef_properties_table(meta):
    """Reflect and return the metadef_properties table."""
    return sqlalchemy.Table('metadef_properties', meta, autoload=True)


def get_metadef_objects_table(meta):
    """Reflect and return the metadef_objects table."""
    return sqlalchemy.Table('metadef_objects', meta, autoload=True)


def get_metadef_tags_table(meta):
    """Reflect and return the metadef_tags table."""
    return sqlalchemy.Table('metadef_tags', meta, autoload=True)
def _get_resource_type_id(meta, name):
    """Return the id of the resource type called *name*, or None."""
    rt_table = get_metadef_resource_types_table(meta)
    query = (select([rt_table.c.id])
             .where(rt_table.c.name == name)
             .select_from(rt_table))
    row = query.execute().fetchone()
    return row[0] if row else None
def _get_resource_type(meta, resource_type_id):
    """Fetch a single resource type row by its primary key."""
    rt_table = get_metadef_resource_types_table(meta)
    query = rt_table.select().where(rt_table.c.id == resource_type_id)
    return query.execute().fetchone()
def _get_namespace_resource_types(meta, namespace_id):
    """Return all resource-type association rows for a namespace."""
    namespace_resource_types_table =\
        get_metadef_namespace_resource_types_table(meta)
    return namespace_resource_types_table.select().\
        where(namespace_resource_types_table.c.namespace_id == namespace_id).\
        execute().fetchall()


def _get_namespace_resource_type_by_ids(meta, namespace_id, rt_id):
    """Return the association row for (namespace_id, rt_id), if any."""
    namespace_resource_types_table =\
        get_metadef_namespace_resource_types_table(meta)
    return namespace_resource_types_table.select().\
        where(and_(
            namespace_resource_types_table.c.namespace_id == namespace_id,
            namespace_resource_types_table.c.resource_type_id == rt_id)).\
        execute().fetchone()
def _get_properties(meta, namespace_id):
    """Return all property rows belonging to a namespace."""
    properties_table = get_metadef_properties_table(meta)
    return properties_table.select().\
        where(properties_table.c.namespace_id == namespace_id).\
        execute().fetchall()


def _get_objects(meta, namespace_id):
    """Return all object rows belonging to a namespace."""
    objects_table = get_metadef_objects_table(meta)
    return objects_table.select().\
        where(objects_table.c.namespace_id == namespace_id).\
        execute().fetchall()


def _get_tags(meta, namespace_id):
    """Return all tag rows belonging to a namespace."""
    tags_table = get_metadef_tags_table(meta)
    return (
        tags_table.select().
        where(tags_table.c.namespace_id == namespace_id).
        execute().fetchall())
def _get_resource_id(table, namespace_id, resource_name):
    """Return the id of the row named *resource_name* in the namespace,
    or None when no such row exists."""
    query = (select([table.c.id])
             .where(and_(table.c.namespace_id == namespace_id,
                         table.c.name == resource_name))
             .select_from(table))
    row = query.execute().fetchone()
    return row[0] if row else None
def _clear_metadata(meta):
    """Delete every row from all metadata-definition tables.

    Child tables are listed before their parents so foreign-key
    constraints are never violated during the sweep.
    """
    metadef_tables = [get_metadef_properties_table(meta),
                      get_metadef_objects_table(meta),
                      get_metadef_tags_table(meta),
                      get_metadef_namespace_resource_types_table(meta),
                      get_metadef_namespaces_table(meta),
                      get_metadef_resource_types_table(meta)]
    for table in metadef_tables:
        table.delete().execute()
        LOG.info(_LI("Table %s has been cleared"), table)
def _clear_namespace_metadata(meta, namespace_id):
    """Delete one namespace row and every row that belongs to it.

    Children (properties, objects, tags, resource-type associations)
    are removed first, then the namespace itself.
    """
    metadef_tables = [get_metadef_properties_table(meta),
                      get_metadef_objects_table(meta),
                      get_metadef_tags_table(meta),
                      get_metadef_namespace_resource_types_table(meta)]
    namespaces_table = get_metadef_namespaces_table(meta)
    for table in metadef_tables:
        table.delete().where(table.c.namespace_id == namespace_id).execute()
    namespaces_table.delete().where(
        namespaces_table.c.id == namespace_id).execute()
def _populate_metadata(meta, metadata_path=None, merge=False,
                       prefer_new=False, overwrite=False):
    """Load metadata-definition JSON files into the database.

    :param meta: bound sqlalchemy MetaData
    :param metadata_path: a single JSON file or a directory of *.json
        files; defaults to CONF.metadata_source_path
    :param merge: update namespaces that already exist in the database
        (otherwise existing namespaces are skipped)
    :param prefer_new: on merge, file content wins over database content
    :param overwrite: wipe and re-create namespaces that already exist
    """
    if not metadata_path:
        metadata_path = CONF.metadata_source_path
    try:
        if isfile(metadata_path):
            json_schema_files = [metadata_path]
        else:
            json_schema_files = [f for f in os.listdir(metadata_path)
                                 if isfile(join(metadata_path, f)) and
                                 f.endswith('.json')]
    except OSError as e:
        LOG.error(utils.exception_to_str(e))
        return
    if not json_schema_files:
        LOG.error(_LE("Json schema files not found in %s. Aborting."),
                  metadata_path)
        return
    namespaces_table = get_metadef_namespaces_table(meta)
    namespace_rt_table = get_metadef_namespace_resource_types_table(meta)
    objects_table = get_metadef_objects_table(meta)
    tags_table = get_metadef_tags_table(meta)
    properties_table = get_metadef_properties_table(meta)
    resource_types_table = get_metadef_resource_types_table(meta)
    for json_schema_file in json_schema_files:
        try:
            file = join(metadata_path, json_schema_file)
            with open(file) as json_file:
                metadata = json.load(json_file)
        except Exception as e:
            # a bad file is skipped, the rest keep loading
            LOG.error(utils.exception_to_str(e))
            continue
        values = {
            'namespace': metadata.get('namespace', None),
            'display_name': metadata.get('display_name', None),
            'description': metadata.get('description', None),
            'visibility': metadata.get('visibility', None),
            'protected': metadata.get('protected', None),
            'owner': metadata.get('owner', 'admin')
        }
        db_namespace = select(
            [namespaces_table.c.id]
        ).where(
            namespaces_table.c.namespace == values['namespace']
        ).select_from(
            namespaces_table
        ).execute().fetchone()
        if db_namespace and overwrite:
            LOG.info(_LI("Overwriting namespace %s"), values['namespace'])
            _clear_namespace_metadata(meta, db_namespace[0])
            db_namespace = None
        if not db_namespace:
            values.update({'created_at': timeutils.utcnow()})
            _insert_data_to_db(namespaces_table, values)
            # re-read so we have the generated namespace id
            db_namespace = select(
                [namespaces_table.c.id]
            ).where(
                namespaces_table.c.namespace == values['namespace']
            ).select_from(
                namespaces_table
            ).execute().fetchone()
        elif not merge:
            LOG.info(_LI("Skipping namespace %s. It already exists in the "
                         "database."), values['namespace'])
            continue
        elif prefer_new:
            values.update({'updated_at': timeutils.utcnow()})
            _update_data_in_db(namespaces_table, values,
                               namespaces_table.c.id, db_namespace[0])
        namespace_id = db_namespace[0]
        for resource_type in metadata.get('resource_type_associations', []):
            rt_id = _get_resource_type_id(meta, resource_type['name'])
            if not rt_id:
                val = {
                    'name': resource_type['name'],
                    'created_at': timeutils.utcnow(),
                    'protected': True
                }
                _insert_data_to_db(resource_types_table, val)
                rt_id = _get_resource_type_id(meta, resource_type['name'])
            elif prefer_new:
                val = {'updated_at': timeutils.utcnow()}
                _update_data_in_db(resource_types_table, val,
                                   resource_types_table.c.id, rt_id)
            values = {
                'namespace_id': namespace_id,
                'resource_type_id': rt_id,
                'properties_target': resource_type.get(
                    'properties_target', None),
                'prefix': resource_type.get('prefix', None)
            }
            namespace_resource_type = _get_namespace_resource_type_by_ids(
                meta, namespace_id, rt_id)
            if not namespace_resource_type:
                values.update({'created_at': timeutils.utcnow()})
                _insert_data_to_db(namespace_rt_table, values)
            elif prefer_new:
                values.update({'updated_at': timeutils.utcnow()})
                _update_rt_association(namespace_rt_table, values,
                                       rt_id, namespace_id)
        for property, schema in six.iteritems(metadata.get('properties',
                                                           {})):
            values = {
                'name': property,
                'namespace_id': namespace_id,
                'json_schema': json.dumps(schema)
            }
            property_id = _get_resource_id(properties_table,
                                           namespace_id, property)
            if not property_id:
                values.update({'created_at': timeutils.utcnow()})
                _insert_data_to_db(properties_table, values)
            elif prefer_new:
                values.update({'updated_at': timeutils.utcnow()})
                _update_data_in_db(properties_table, values,
                                   properties_table.c.id, property_id)
        for object in metadata.get('objects', []):
            values = {
                'name': object['name'],
                'description': object.get('description', None),
                'namespace_id': namespace_id,
                'json_schema': json.dumps(
                    object.get('properties', None))
            }
            object_id = _get_resource_id(objects_table, namespace_id,
                                         object['name'])
            if not object_id:
                values.update({'created_at': timeutils.utcnow()})
                _insert_data_to_db(objects_table, values)
            elif prefer_new:
                values.update({'updated_at': timeutils.utcnow()})
                _update_data_in_db(objects_table, values,
                                   objects_table.c.id, object_id)
        for tag in metadata.get('tags', []):
            values = {
                'name': tag.get('name'),
                'namespace_id': namespace_id,
            }
            tag_id = _get_resource_id(tags_table, namespace_id, tag['name'])
            if not tag_id:
                values.update({'created_at': timeutils.utcnow()})
                _insert_data_to_db(tags_table, values)
            elif prefer_new:
                values.update({'updated_at': timeutils.utcnow()})
                _update_data_in_db(tags_table, values,
                                   tags_table.c.id, tag_id)
        LOG.info(_LI("File %s loaded to database."), file)
    LOG.info(_LI("Metadata loading finished"))
def _insert_data_to_db(table, values, log_exception=True):
    """Insert *values* into *table*; duplicates are logged, not raised."""
    try:
        table.insert(values=values).execute()
    except sqlalchemy.exc.IntegrityError:
        if log_exception:
            LOG.warning(_LW("Duplicate entry for values: %s"), values)


def _update_data_in_db(table, values, column, value):
    """Update the rows of *table* where *column* == *value*."""
    try:
        (table.update(values=values).
         where(column == value).execute())
    except sqlalchemy.exc.IntegrityError:
        LOG.warning(_LW("Duplicate entry for values: %s"), values)


def _update_rt_association(table, values, rt_id, namespace_id):
    """Update the namespace<->resource-type association row."""
    try:
        (table.update(values=values).
         where(and_(table.c.resource_type_id == rt_id,
                    table.c.namespace_id == namespace_id)).execute())
    except sqlalchemy.exc.IntegrityError:
        LOG.warning(_LW("Duplicate entry for values: %s"), values)
def _export_data_to_file(meta, path):
    """Dump every metadata-definition namespace to a JSON file.

    One file per namespace is written under *path* (defaults to
    CONF.metadata_source_path), named after the namespace's display
    name with all non-word characters stripped.
    """
    if not path:
        path = CONF.metadata_source_path
    namespace_table = get_metadef_namespaces_table(meta)
    namespaces = namespace_table.select().execute().fetchall()
    # NOTE: raw string so '\W' reaches the regex engine unescaped
    # (non-raw '\W' is an invalid escape sequence on Python 3)
    pattern = re.compile(r'[\W_]+', re.UNICODE)
    for namespace in namespaces:
        namespace_id = namespace['id']
        namespace_file_name = pattern.sub('', namespace['display_name'])
        values = {
            'namespace': namespace['namespace'],
            'display_name': namespace['display_name'],
            'description': namespace['description'],
            'visibility': namespace['visibility'],
            'protected': namespace['protected'],
            'resource_type_associations': [],
            'properties': {},
            'objects': [],
            'tags': []
        }
        namespace_resource_types = _get_namespace_resource_types(meta,
                                                                 namespace_id)
        db_objects = _get_objects(meta, namespace_id)
        db_properties = _get_properties(meta, namespace_id)
        db_tags = _get_tags(meta, namespace_id)
        resource_types = []
        for namespace_resource_type in namespace_resource_types:
            resource_type =\
                _get_resource_type(meta,
                                   namespace_resource_type['resource_type_id'])
            resource_types.append({
                'name': resource_type['name'],
                'prefix': namespace_resource_type['prefix'],
                'properties_target': namespace_resource_type[
                    'properties_target']
            })
        values.update({
            'resource_type_associations': resource_types
        })
        objects = []
        for object in db_objects:
            objects.append({
                "name": object['name'],
                "description": object['description'],
                "properties": json.loads(object['json_schema'])
            })
        values.update({
            'objects': objects
        })
        properties = {}
        for property in db_properties:
            properties.update({
                property['name']: json.loads(property['json_schema'])
            })
        values.update({
            'properties': properties
        })
        tags = []
        for tag in db_tags:
            tags.append({
                "name": tag['name']
            })
        values.update({
            'tags': tags
        })
        try:
            file_name = ''.join([path, namespace_file_name, '.json'])
            with open(file_name, 'w') as json_file:
                json_file.write(json.dumps(values))
        except Exception as e:
            # keep exporting the remaining namespaces on failure
            LOG.exception(utils.exception_to_str(e))
        LOG.info(_LI("Namespace %(namespace)s saved in %(file)s") % {
            'namespace': namespace_file_name, 'file': file_name})
def db_load_metadefs(engine, metadata_path=None, merge=False,
                     prefer_new=False, overwrite=False):
    """Load metadata definitions from JSON files into the database."""
    meta = MetaData()
    meta.bind = engine
    # --prefer_new / --overwrite only make sense together with --merge
    if not merge and (prefer_new or overwrite):
        LOG.error(_LE("To use --prefer_new or --overwrite you need to combine "
                      "of these options with --merge option."))
        return
    if prefer_new and overwrite and merge:
        LOG.error(_LE("Please provide no more than one option from this list: "
                      "--prefer_new, --overwrite"))
        return
    _populate_metadata(meta, metadata_path, merge, prefer_new, overwrite)


def db_unload_metadefs(engine):
    """Delete all metadata definitions from the database."""
    meta = MetaData()
    meta.bind = engine
    _clear_metadata(meta)


def db_export_metadefs(engine, metadata_path=None):
    """Export all metadata definitions from the database to JSON files."""
    meta = MetaData()
    meta.bind = engine
    _export_data_to_file(meta, metadata_path)

View File

@ -1,310 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy.utils import paginate_query
from oslo_log import log as logging
import sqlalchemy.exc as sa_exc
from sqlalchemy import or_
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
import daisy.db.sqlalchemy.metadef_api as metadef_api
from daisy.db.sqlalchemy import models_metadef as models
from daisy import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LW = i18n._LW
def _is_namespace_visible(context, namespace, status=None):
"""Return True if the namespace is visible in this context."""
# Is admin == visible
if context.is_admin:
return True
# No owner == visible
if namespace['owner'] is None:
return True
# Is public == visible
if 'visibility' in namespace:
if namespace['visibility'] == 'public':
return True
# context.owner has a value and is the namespace owner == visible
if context.owner is not None:
if context.owner == namespace['owner']:
return True
# Private
return False
def _select_namespaces_query(context, session):
    """Build the query to get all namespaces based on the context"""
    LOG.debug("context.is_admin=%(is_admin)s; context.owner=%(owner)s" %
              {'is_admin': context.is_admin, 'owner': context.owner})
    query_ns = session.query(models.MetadefNamespace)
    # Admins see every namespace.
    if context.is_admin:
        return query_ns
    # Regular users see public namespaces, plus their own namespaces of
    # any visibility when the context carries an owner.
    if context.owner is not None:
        return query_ns.filter(
            or_(models.MetadefNamespace.owner == context.owner,
                models.MetadefNamespace.visibility == 'public'))
    return query_ns.filter(
        models.MetadefNamespace.visibility == 'public')
def _get(context, namespace_id, session):
    """Get a namespace by id, raise if not found"""
    # NotFound for a bad id; Forbidden for a row the caller may not see.
    try:
        query = session.query(models.MetadefNamespace)\
            .filter_by(id=namespace_id)
        namespace_rec = query.one()
    except sa_orm.exc.NoResultFound:
        msg = (_("Metadata definition namespace not found for id=%s")
               % namespace_id)
        LOG.warn(msg)
        raise exc.MetadefNamespaceNotFound(msg)
    # Make sure they are allowed to view it.
    if not _is_namespace_visible(context, namespace_rec.to_dict()):
        msg = ("Forbidding request, metadata definition namespace=%s"
               " is not visible.") % namespace_rec.namespace
        LOG.debug(msg)
        emsg = _("Forbidding request, metadata definition namespace=%s"
                 " is not visible.") % namespace_rec.namespace
        raise exc.MetadefForbidden(emsg)
    return namespace_rec
def _get_by_name(context, name, session):
    """Get a namespace by name, raise if not found"""
    # NotFound for an unknown name; Forbidden for an invisible row.
    try:
        query = session.query(models.MetadefNamespace)\
            .filter_by(namespace=name)
        namespace_rec = query.one()
    except sa_orm.exc.NoResultFound:
        msg = "Metadata definition namespace=%s was not found." % name
        LOG.debug(msg)
        raise exc.MetadefNamespaceNotFound(namespace_name=name)
    # Make sure they are allowed to view it.
    if not _is_namespace_visible(context, namespace_rec.to_dict()):
        msg = ("Forbidding request, metadata definition namespace=%s"
               " is not visible." % name)
        LOG.debug(msg)
        emsg = _("Forbidding request, metadata definition namespace=%s"
                 " is not visible.") % name
        raise exc.MetadefForbidden(emsg)
    return namespace_rec
def _get_all(context, session, filters=None, marker=None,
             limit=None, sort_key='created_at', sort_dir='desc'):
    """Get all namespaces that match zero or more filters.

    :param filters: dict of filter keys and values.
    :param marker: namespace id after which to start page
    :param limit: maximum number of namespaces to return
    :param sort_key: namespace attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    filters = filters or {}
    query = _select_namespaces_query(context, session)
    # if visibility filter, apply it to the context based query
    visibility = filters.pop('visibility', None)
    if visibility is not None:
        query = query.filter(models.MetadefNamespace.visibility == visibility)
    # if id_list filter, apply it to the context based query
    id_list = filters.pop('id_list', None)
    if id_list is not None:
        query = query.filter(models.MetadefNamespace.id.in_(id_list))
    marker_namespace = None
    if marker is not None:
        marker_namespace = _get(context, marker, session)
    sort_keys = ['created_at', 'id']
    # Make the requested key the primary sort key.  (The original used a
    # conditional *expression* purely for its side effect; a plain
    # statement says what is meant.)
    if sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    query = paginate_query(query=query,
                           model=models.MetadefNamespace,
                           limit=limit,
                           sort_keys=sort_keys,
                           marker=marker_namespace, sort_dir=sort_dir)
    return query.all()
def _get_all_by_resource_types(context, session, filters, marker=None,
                               limit=None, sort_key=None, sort_dir=None):
    """get all visible namespaces for the specified resource_types"""
    resource_types = filters['resource_types']
    resource_type_list = resource_types.split(',')
    db_recs = (
        session.query(models.MetadefResourceType)
        .join(models.MetadefResourceType.associations)
        .filter(models.MetadefResourceType.name.in_(resource_type_list))
        .values(models.MetadefResourceType.name,
                models.MetadefNamespaceResourceType.namespace_id)
    )
    namespace_id_list = []
    for name, namespace_id in db_recs:
        namespace_id_list.append(namespace_id)
    # NOTE: the original 'len(x) is 0' compared int object *identity*;
    # an emptiness test is the correct idiom.
    if not namespace_id_list:
        return []
    filters2 = filters
    filters2.update({'id_list': namespace_id_list})
    return _get_all(context, session, filters2,
                    marker, limit, sort_key, sort_dir)
def get_all(context, session, marker=None, limit=None,
            sort_key=None, sort_dir=None, filters=None):
    """List all visible namespaces as a list of dicts."""
    filters = filters or {}
    if 'resource_types' in filters:
        namespaces = _get_all_by_resource_types(
            context, session, filters, marker, limit, sort_key, sort_dir)
    else:
        namespaces = _get_all(
            context, session, filters, marker, limit, sort_key, sort_dir)
    # A list comprehension, not map(): map() is a lazy one-shot iterator
    # on Python 3, which would surprise callers that re-iterate or
    # len() the result.
    return [ns.to_dict() for ns in namespaces]
def get(context, name, session):
    """Get a namespace by name, raise if not found"""
    return _get_by_name(context, name, session).to_dict()
def create(context, values, session):
    """Create a namespace, raise if namespace already exists."""
    namespace_name = values['namespace']
    namespace = models.MetadefNamespace()
    # timestamps etc. are managed by the model, not supplied by callers
    metadef_api.utils.drop_protected_attrs(models.MetadefNamespace, values)
    namespace.update(values.copy())
    try:
        namespace.save(session=session)
    except db_exc.DBDuplicateEntry:
        msg = ("Can not create the metadata definition namespace."
               " Namespace=%s already exists.") % namespace_name
        LOG.debug(msg)
        raise exc.MetadefDuplicateNamespace(
            namespace_name=namespace_name)
    return namespace.to_dict()
def update(context, namespace_id, values, session):
    """Update a namespace, raise if not found/visible or duplicate result"""
    namespace_rec = _get(context, namespace_id, session)
    metadef_api.utils.drop_protected_attrs(models.MetadefNamespace, values)
    try:
        namespace_rec.update(values.copy())
        namespace_rec.save(session=session)
    except db_exc.DBDuplicateEntry:
        msg = ("Invalid update. It would result in a duplicate"
               " metadata definition namespace with the same name of %s"
               % values['namespace'])
        LOG.debug(msg)
        emsg = (_("Invalid update. It would result in a duplicate"
                  " metadata definition namespace with the same name of %s")
                % values['namespace'])
        # NOTE(review): raised with a positional message here but with
        # namespace_name= in create() -- confirm the exception class
        # accepts both call forms.
        raise exc.MetadefDuplicateNamespace(emsg)
    return namespace_rec.to_dict()
def delete(context, name, session):
    """Raise if not found, has references or not visible"""
    namespace_rec = _get_by_name(context, name, session)
    try:
        session.delete(namespace_rec)
        session.flush()
    except db_exc.DBError as e:
        # A foreign-key violation means other records still point here.
        if isinstance(e.inner_exception, sa_exc.IntegrityError):
            msg = ("Metadata definition namespace=%s not deleted."
                   " Other records still refer to it." % name)
            LOG.debug(msg)
            raise exc.MetadefIntegrityError(
                record_type='namespace', record_name=name)
        else:
            raise e
    return namespace_rec.to_dict()
def delete_cascade(context, name, session):
    """Raise if not found, has references or not visible"""
    namespace_rec = _get_by_name(context, name, session)
    with session.begin():
        try:
            # Purge child records first so the namespace row can go.
            metadef_api.tag.delete_namespace_content(
                context, namespace_rec.id, session)
            metadef_api.object.delete_namespace_content(
                context, namespace_rec.id, session)
            metadef_api.property.delete_namespace_content(
                context, namespace_rec.id, session)
            metadef_api.resource_type_association.delete_namespace_content(
                context, namespace_rec.id, session)
            session.delete(namespace_rec)
            session.flush()
        except db_exc.DBError as e:
            if isinstance(e.inner_exception, sa_exc.IntegrityError):
                msg = ("Metadata definition namespace=%s not deleted."
                       " Other records still refer to it." % name)
                LOG.debug(msg)
                raise exc.MetadefIntegrityError(
                    record_type='namespace', record_name=name)
            else:
                raise e
    return namespace_rec.to_dict()

View File

@ -1,158 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
from sqlalchemy import func
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
from daisy.db.sqlalchemy.metadef_api import namespace as namespace_api
import daisy.db.sqlalchemy.metadef_api.utils as metadef_utils
from daisy.db.sqlalchemy import models_metadef as models
from daisy import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LW = i18n._LW
def _get(context, object_id, session):
    """Fetch a MetadefObject row by id; raise NotFound otherwise."""
    try:
        query = session.query(models.MetadefObject)\
            .filter_by(id=object_id)
        metadef_object = query.one()
    except sa_orm.exc.NoResultFound:
        msg = (_("Metadata definition object not found for id=%s")
               % object_id)
        LOG.warn(msg)
        raise exc.MetadefObjectNotFound(msg)
    return metadef_object
def _get_by_name(context, namespace_name, name, session):
    """Fetch an object by name; raise if the namespace is not
    found/visible or the object does not exist in it."""
    # namespace_api.get also enforces namespace visibility
    namespace = namespace_api.get(context, namespace_name, session)
    try:
        query = session.query(models.MetadefObject)\
            .filter_by(name=name, namespace_id=namespace['id'])
        metadef_object = query.one()
    except sa_orm.exc.NoResultFound:
        msg = ("The metadata definition object with name=%(name)s"
               " was not found in namespace=%(namespace_name)s."
               % {'name': name, 'namespace_name': namespace_name})
        LOG.debug(msg)
        raise exc.MetadefObjectNotFound(object_name=name,
                                        namespace_name=namespace_name)
    return metadef_object
def get_all(context, namespace_name, session):
    """Return all metadef objects in the namespace as a list of dicts.

    Raises if the namespace is not found or not visible.
    """
    namespace = namespace_api.get(context, namespace_name, session)
    query = session.query(models.MetadefObject)\
        .filter_by(namespace_id=namespace['id'])
    md_objects = query.all()
    # comprehension replaces the manual append loop
    return [obj.to_dict() for obj in md_objects]
def create(context, namespace_name, values, session):
    """Create a metadef object; raise on duplicate name in the namespace."""
    namespace = namespace_api.get(context, namespace_name, session)
    values.update({'namespace_id': namespace['id']})
    md_object = models.MetadefObject()
    # timestamps etc. are managed by the model, not supplied by callers
    metadef_utils.drop_protected_attrs(models.MetadefObject, values)
    md_object.update(values.copy())
    try:
        md_object.save(session=session)
    except db_exc.DBDuplicateEntry:
        msg = ("A metadata definition object with name=%(name)s"
               " in namespace=%(namespace_name)s already exists."
               % {'name': md_object.name,
                  'namespace_name': namespace_name})
        LOG.debug(msg)
        raise exc.MetadefDuplicateObject(
            object_name=md_object.name, namespace_name=namespace_name)
    return md_object.to_dict()
def get(context, namespace_name, name, session):
    """Fetch a single metadef object by name as a dict."""
    return _get_by_name(context, namespace_name, name, session).to_dict()
def update(context, namespace_name, object_id, values, session):
    """Update an object, raise if ns not found/visible or duplicate result"""
    # namespace lookup is only for the visibility check
    namespace_api.get(context, namespace_name, session)
    md_object = _get(context, object_id, session)
    metadef_utils.drop_protected_attrs(models.MetadefObject, values)
    # values['updated_at'] = timeutils.utcnow() - done by TS mixin
    try:
        md_object.update(values.copy())
        md_object.save(session=session)
    except db_exc.DBDuplicateEntry:
        msg = ("Invalid update. It would result in a duplicate"
               " metadata definition object with same name=%(name)s"
               " in namespace=%(namespace_name)s."
               % {'name': md_object.name, 'namespace_name': namespace_name})
        LOG.debug(msg)
        emsg = (_("Invalid update. It would result in a duplicate"
                  " metadata definition object with the same name=%(name)s"
                  " in namespace=%(namespace_name)s.")
                % {'name': md_object.name, 'namespace_name': namespace_name})
        # NOTE(review): raised with a positional message here but with
        # kwargs in create() -- confirm the exception class accepts both.
        raise exc.MetadefDuplicateObject(emsg)
    return md_object.to_dict()
def delete(context, namespace_name, object_name, session):
    """Delete an object by name; return the deleted row as a dict."""
    # namespace lookup is only for the visibility check
    namespace_api.get(context, namespace_name, session)
    md_object = _get_by_name(context, namespace_name, object_name, session)
    session.delete(md_object)
    session.flush()
    return md_object.to_dict()
def delete_namespace_content(context, namespace_id, session):
    """Use this def only if the ns for the id has been verified as visible

    Returns the number of deleted object rows.
    """
    # (the former 'count = 0' pre-initialisation was a dead store)
    query = session.query(models.MetadefObject)\
        .filter_by(namespace_id=namespace_id)
    count = query.delete(synchronize_session='fetch')
    return count
def delete_by_namespace_name(context, namespace_name, session):
    """Resolve the namespace by name, then purge its objects."""
    ns = namespace_api.get(context, namespace_name, session)
    return delete_namespace_content(context, ns['id'], session)
def count(context, namespace_name, session):
    """Get the count of objects for a namespace, raise if ns not found"""
    namespace = namespace_api.get(context, namespace_name, session)
    # COUNT(*) is computed server-side; scalar() unwraps the single value
    query = session.query(func.count(models.MetadefObject.id))\
        .filter_by(namespace_id=namespace['id'])
    return query.scalar()

View File

@ -1,170 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
from sqlalchemy import func
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
from daisy.db.sqlalchemy.metadef_api import namespace as namespace_api
from daisy.db.sqlalchemy.metadef_api import utils as metadef_utils
from daisy.db.sqlalchemy import models_metadef as models
from daisy import i18n
LOG = logging.getLogger(__name__)
_ = i18n._
_LW = i18n._LW
def _get(context, property_id, session):
    """Fetch a MetadefProperty row by id; raise NotFound otherwise."""
    try:
        query = session.query(models.MetadefProperty)\
            .filter_by(id=property_id)
        property_rec = query.one()
    except sa_orm.exc.NoResultFound:
        msg = (_("Metadata definition property not found for id=%s")
               % property_id)
        LOG.warn(msg)
        raise exc.MetadefPropertyNotFound(msg)
    return property_rec
def _get_by_name(context, namespace_name, name, session):
    """get a property; raise if ns not found/visible or property not found"""
    # Namespace lookup raises if the namespace is missing or not visible.
    namespace = namespace_api.get(context, namespace_name, session)
    query = session.query(models.MetadefProperty).filter_by(
        name=name, namespace_id=namespace['id'])
    try:
        return query.one()
    except sa_orm.exc.NoResultFound:
        LOG.debug("The metadata definition property with name=%(name)s"
                  " was not found in namespace=%(namespace_name)s."
                  % {'name': name, 'namespace_name': namespace_name})
        raise exc.MetadefPropertyNotFound(property_name=name,
                                          namespace_name=namespace_name)
def get(context, namespace_name, name, session):
    """get a property; raise if ns not found/visible or property not found"""
    return _get_by_name(context, namespace_name, name, session).to_dict()
def get_all(context, namespace_name, session):
    """List all properties of a namespace as dicts; raise if ns not found."""
    namespace = namespace_api.get(context, namespace_name, session)
    query = session.query(models.MetadefProperty)\
        .filter_by(namespace_id=namespace['id'])
    return [rec.to_dict() for rec in query.all()]
def create(context, namespace_name, values, session):
    """Create a property in a namespace; raise on duplicate name."""
    namespace = namespace_api.get(context, namespace_name, session)
    values.update({'namespace_id': namespace['id']})
    property_rec = models.MetadefProperty()
    # Timestamps are managed by the model mixin, never taken from callers.
    metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
    property_rec.update(values.copy())
    try:
        property_rec.save(session=session)
    except db_exc.DBDuplicateEntry:
        LOG.debug("Can not create metadata definition property. A property"
                  " with name=%(name)s already exists in"
                  " namespace=%(namespace_name)s."
                  % {'name': property_rec.name,
                     'namespace_name': namespace_name})
        raise exc.MetadefDuplicateProperty(
            property_name=property_rec.name,
            namespace_name=namespace_name)
    return property_rec.to_dict()
def update(context, namespace_name, property_id, values, session):
    """Update a property, raise if ns not found/visible or duplicate result"""
    namespace_api.get(context, namespace_name, session)
    property_rec = _get(context, property_id, session)
    metadef_utils.drop_protected_attrs(models.MetadefProperty, values)
    # updated_at is maintained by the timestamp mixin on save.
    try:
        property_rec.update(values.copy())
        property_rec.save(session=session)
    except db_exc.DBDuplicateEntry:
        subs = {'name': property_rec.name,
                'namespace_name': namespace_name}
        LOG.debug("Invalid update. It would result in a duplicate"
                  " metadata definition property with the same name=%(name)s"
                  " in namespace=%(namespace_name)s." % subs)
        emsg = (_("Invalid update. It would result in a duplicate"
                  " metadata definition property with the same name=%(name)s"
                  " in namespace=%(namespace_name)s.") % subs)
        raise exc.MetadefDuplicateProperty(emsg)
    return property_rec.to_dict()
def delete(context, namespace_name, property_name, session):
    """Delete a named property; raise if ns or property not found."""
    rec = _get_by_name(context, namespace_name, property_name, session)
    # _get_by_name raises when missing, so rec is always truthy here.
    if rec:
        session.delete(rec)
        session.flush()
    return rec.to_dict()
def delete_namespace_content(context, namespace_id, session):
    """Use this def only if the ns for the id has been verified as visible"""
    query = session.query(models.MetadefProperty)\
        .filter_by(namespace_id=namespace_id)
    # Returns the number of rows removed.
    return query.delete(synchronize_session='fetch')
def delete_by_namespace_name(context, namespace_name, session):
    """Delete all properties of a namespace; raise if ns not found/visible."""
    ns = namespace_api.get(context, namespace_name, session)
    return delete_namespace_content(context, ns['id'], session)
def count(context, namespace_name, session):
    """Get the count of properties for a namespace, raise if ns not found"""
    ns = namespace_api.get(context, namespace_name, session)
    return session.query(func.count(models.MetadefProperty.id))\
        .filter_by(namespace_id=ns['id'])\
        .scalar()

View File

@ -1,111 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
import sqlalchemy.exc as sa_exc
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
import daisy.db.sqlalchemy.metadef_api.utils as metadef_utils
from daisy.db.sqlalchemy import models_metadef as models
LOG = logging.getLogger(__name__)
def get(context, name, session):
    """Get a resource type, raise if not found"""
    query = session.query(models.MetadefResourceType).filter_by(name=name)
    try:
        resource_type = query.one()
    except sa_orm.exc.NoResultFound:
        LOG.debug(
            "No metadata definition resource-type found with name %s" % name)
        raise exc.MetadefResourceTypeNotFound(resource_type_name=name)
    return resource_type.to_dict()
def get_all(context, session):
    """Get a list of all resource types"""
    return [rt.to_dict()
            for rt in session.query(models.MetadefResourceType).all()]
def create(context, values, session):
    """Create a resource_type, raise if it already exists."""
    resource_type = models.MetadefResourceType()
    # Timestamp columns are mixin-managed; never taken from the caller.
    metadef_utils.drop_protected_attrs(models.MetadefResourceType, values)
    resource_type.update(values.copy())
    try:
        resource_type.save(session=session)
    except db_exc.DBDuplicateEntry:
        LOG.debug("Can not create the metadata definition resource-type."
                  " A resource-type with name=%s already exists."
                  % resource_type.name)
        raise exc.MetadefDuplicateResourceType(
            resource_type_name=resource_type.name)
    return resource_type.to_dict()
def update(context, values, session):
    """Update a resource type, raise if not found.

    :param values: dict of column values; must include 'name' identifying
                   the resource type to update
    :returns: the updated record as a dict
    :raises: MetadefResourceTypeNotFound if no such resource type exists
    """
    name = values['name']
    metadef_utils.drop_protected_attrs(models.MetadefResourceType, values)
    # Fetch the ORM record directly: get() returns a plain dict
    # (to_dict()), which has no save()/to_dict() and cannot be updated
    # through the session.
    try:
        db_rec = session.query(models.MetadefResourceType)\
            .filter_by(name=name).one()
    except sa_orm.exc.NoResultFound:
        LOG.debug(
            "No metadata definition resource-type found with name %s" % name)
        raise exc.MetadefResourceTypeNotFound(resource_type_name=name)
    db_rec.update(values.copy())
    db_rec.save(session=session)
    return db_rec.to_dict()
def delete(context, name, session):
    """Delete a resource type or raise if not found or is protected.

    :returns: the deleted record as a dict
    :raises: MetadefResourceTypeNotFound if no such resource type exists
    :raises: ProtectedMetadefResourceTypeSystemDelete for seeded types
    :raises: MetadefIntegrityError if the type still has associations
    """
    # Fetch the ORM record directly: get() returns a plain dict, which
    # exposes no .protected attribute and cannot be passed to
    # session.delete().
    try:
        db_rec = session.query(models.MetadefResourceType)\
            .filter_by(name=name).one()
    except sa_orm.exc.NoResultFound:
        LOG.debug(
            "No metadata definition resource-type found with name %s" % name)
        raise exc.MetadefResourceTypeNotFound(resource_type_name=name)
    if db_rec.protected is True:
        msg = ("Delete forbidden. Metadata definition resource-type %s is a"
               " seeded-system type and can not be deleted.") % name
        LOG.debug(msg)
        raise exc.ProtectedMetadefResourceTypeSystemDelete(
            resource_type_name=name)
    try:
        session.delete(db_rec)
        session.flush()
    except db_exc.DBError as e:
        # A foreign-key violation means namespaces still reference the type.
        if isinstance(e.inner_exception, sa_exc.IntegrityError):
            msg = ("Could not delete Metadata definition resource-type %s"
                   ". It still has content") % name
            LOG.debug(msg)
            raise exc.MetadefIntegrityError(
                record_type='resource-type', record_name=name)
        else:
            raise e
    return db_rec.to_dict()

View File

@ -1,217 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_log import log as logging
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
from daisy.db.sqlalchemy.metadef_api\
import namespace as namespace_api
from daisy.db.sqlalchemy.metadef_api\
import resource_type as resource_type_api
from daisy.db.sqlalchemy.metadef_api\
import utils as metadef_utils
from daisy.db.sqlalchemy import models_metadef as models
LOG = logging.getLogger(__name__)
def _to_db_dict(namespace_id, resource_type_id, model_dict):
"""transform a model dict to a metadef_namespace_resource_type dict"""
db_dict = {'namespace_id': namespace_id,
'resource_type_id': resource_type_id,
'properties_target': model_dict['properties_target'],
'prefix': model_dict['prefix']}
return db_dict
def _to_model_dict(resource_type_name, ns_res_type_dict):
"""transform a metadef_namespace_resource_type dict to a model dict"""
model_dict = {'name': resource_type_name,
'properties_target': ns_res_type_dict['properties_target'],
'prefix': ns_res_type_dict['prefix'],
'created_at': ns_res_type_dict['created_at'],
'updated_at': ns_res_type_dict['updated_at']}
return model_dict
def _set_model_dict(resource_type_name, properties_target, prefix,
created_at, updated_at):
"""return a model dict set with the passed in key values"""
model_dict = {'name': resource_type_name,
'properties_target': properties_target,
'prefix': prefix,
'created_at': created_at,
'updated_at': updated_at}
return model_dict
def _get(context, namespace_name, resource_type_name,
         namespace_id, resource_type_id, session):
    """Get a namespace resource_type association"""
    # visibility check assumed done in calling routine via namespace_get
    query = session.query(models.MetadefNamespaceResourceType)\
        .filter_by(namespace_id=namespace_id,
                   resource_type_id=resource_type_id)
    try:
        return query.one()
    except sa_orm.exc.NoResultFound:
        LOG.debug("The metadata definition resource-type association of"
                  " resource_type=%(resource_type_name)s to"
                  " namespace_name=%(namespace_name)s was not found."
                  % {'resource_type_name': resource_type_name,
                     'namespace_name': namespace_name})
        raise exc.MetadefResourceTypeAssociationNotFound(
            resource_type_name=resource_type_name,
            namespace_name=namespace_name)
def _create_association(
        context, namespace_name, resource_type_name, values, session):
    """Create an association, raise if it already exists."""
    assoc_rec = models.MetadefNamespaceResourceType()
    metadef_utils.drop_protected_attrs(
        models.MetadefNamespaceResourceType, values)
    # updated_at is maintained by the timestamp mixin on save.
    assoc_rec.update(values.copy())
    try:
        assoc_rec.save(session=session)
    except db_exc.DBDuplicateEntry:
        LOG.debug("The metadata definition resource-type association of"
                  " resource_type=%(resource_type_name)s to"
                  " namespace=%(namespace_name)s, already exists."
                  % {'resource_type_name': resource_type_name,
                     'namespace_name': namespace_name})
        raise exc.MetadefDuplicateResourceTypeAssociation(
            resource_type_name=resource_type_name,
            namespace_name=namespace_name)
    return assoc_rec.to_dict()
def _delete(context, namespace_name, resource_type_name,
            namespace_id, resource_type_id, session):
    """Delete a resource type association or raise if not found."""
    assoc_rec = _get(context, namespace_name, resource_type_name,
                     namespace_id, resource_type_id, session)
    session.delete(assoc_rec)
    session.flush()
    return assoc_rec.to_dict()
def get(context, namespace_name, resource_type_name, session):
    """Get a resource_type associations; raise if not found"""
    namespace = namespace_api.get(context, namespace_name, session)
    resource_type = resource_type_api.get(
        context, resource_type_name, session)
    assoc = _get(context, namespace_name, resource_type_name,
                 namespace['id'], resource_type['id'], session)
    return _to_model_dict(resource_type_name, assoc)
def get_all_by_namespace(context, namespace_name, session):
    """List resource_type associations by namespace, raise if not found"""
    # namespace get raises an exception if not visible
    namespace = namespace_api.get(
        context, namespace_name, session)
    db_recs = (
        session.query(models.MetadefResourceType)
        .join(models.MetadefResourceType.associations)
        .filter_by(namespace_id=namespace['id'])
        .values(models.MetadefResourceType.name,
                models.MetadefNamespaceResourceType.properties_target,
                models.MetadefNamespaceResourceType.prefix,
                models.MetadefNamespaceResourceType.created_at,
                models.MetadefNamespaceResourceType.updated_at))
    # Each row is (name, properties_target, prefix, created_at, updated_at).
    return [_set_model_dict(name, target, prefix, created, updated)
            for name, target, prefix, created, updated in db_recs]
def create(context, namespace_name, values, session):
    """Create an association, raise if already exists or ns not found."""
    namespace = namespace_api.get(
        context, namespace_name, session)
    resource_type_name = values['name']
    metadef_utils.drop_protected_attrs(
        models.MetadefNamespaceResourceType, values)
    # Auto-create the resource type when it does not exist yet.
    try:
        resource_type = resource_type_api.get(
            context, resource_type_name, session)
    except exc.NotFound:
        resource_type = None
        LOG.debug("Creating resource-type %s" % resource_type_name)
    if resource_type is None:
        resource_type = resource_type_api.create(
            context, {'name': resource_type_name, 'protected': 0}, session)
    # Create the association record, set the field values
    db_dict = _to_db_dict(namespace['id'], resource_type['id'], values)
    new_rec = _create_association(context, namespace_name,
                                  resource_type_name, db_dict, session)
    return _to_model_dict(resource_type_name, new_rec)
def delete(context, namespace_name, resource_type_name, session):
    """Delete an association or raise if not found"""
    namespace = namespace_api.get(context, namespace_name, session)
    resource_type = resource_type_api.get(
        context, resource_type_name, session)
    removed = _delete(context, namespace_name, resource_type_name,
                      namespace['id'], resource_type['id'], session)
    return _to_model_dict(resource_type_name, removed)
def delete_namespace_content(context, namespace_id, session):
    """Use this def only if the ns for the id has been verified as visible"""
    query = session.query(models.MetadefNamespaceResourceType)\
        .filter_by(namespace_id=namespace_id)
    # Returns the number of association rows removed.
    return query.delete(synchronize_session='fetch')

View File

@ -1,204 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_db import exception as db_exc
from oslo_db.sqlalchemy.utils import paginate_query
from oslo_log import log as logging
from sqlalchemy import func
import sqlalchemy.orm as sa_orm
from daisy.common import exception as exc
from daisy.db.sqlalchemy.metadef_api import namespace as namespace_api
import daisy.db.sqlalchemy.metadef_api.utils as metadef_utils
from daisy.db.sqlalchemy import models_metadef as models
from daisy import i18n
LOG = logging.getLogger(__name__)
_LW = i18n._LW
def _get(context, id, session):
    """Fetch a tag model record by id; raise MetadefTagNotFound."""
    query = (session.query(models.MetadefTag).filter_by(id=id))
    try:
        return query.one()
    except sa_orm.exc.NoResultFound:
        msg = (_LW("Metadata tag not found for id %s") % id)
        LOG.warn(msg)
        raise exc.MetadefTagNotFound(message=msg)
def _get_by_name(context, namespace_name, name, session):
    """Fetch a tag by name within a namespace; raise if either is missing."""
    namespace = namespace_api.get(context, namespace_name, session)
    query = (session.query(models.MetadefTag).filter_by(
        name=name, namespace_id=namespace['id']))
    try:
        return query.one()
    except sa_orm.exc.NoResultFound:
        LOG.debug("The metadata tag with name=%(name)s"
                  " was not found in namespace=%(namespace_name)s."
                  % {'name': name, 'namespace_name': namespace_name})
        raise exc.MetadefTagNotFound(name=name,
                                     namespace_name=namespace_name)
def get_all(context, namespace_name, session, filters=None, marker=None,
            limit=None, sort_key='created_at', sort_dir='desc'):
    """Get all tags that match zero or more filters.
    :param filters: dict of filter keys and values.
    :param marker: tag id after which to start page
    :param limit: maximum number of namespaces to return
    :param sort_key: namespace attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    """
    namespace = namespace_api.get(context, namespace_name, session)
    query = (session.query(models.MetadefTag).filter_by(
        namespace_id=namespace['id']))
    # Resolve the marker id to its record so paginate_query can anchor on it.
    marker_tag = None
    if marker is not None:
        marker_tag = _get(context, marker, session)
    sort_keys = ['created_at', 'id']
    if sort_key not in sort_keys:
        sort_keys.insert(0, sort_key)
    query = paginate_query(query=query,
                           model=models.MetadefTag,
                           limit=limit,
                           sort_keys=sort_keys,
                           marker=marker_tag, sort_dir=sort_dir)
    return [tag.to_dict() for tag in query.all()]
def create(context, namespace_name, values, session):
    """Create a tag in a namespace; raise on duplicate name."""
    namespace = namespace_api.get(context, namespace_name, session)
    values.update({'namespace_id': namespace['id']})
    metadef_tag = models.MetadefTag()
    metadef_utils.drop_protected_attrs(models.MetadefTag, values)
    metadef_tag.update(values.copy())
    try:
        metadef_tag.save(session=session)
    except db_exc.DBDuplicateEntry:
        LOG.debug("A metadata tag name=%(name)s"
                  " in namespace=%(namespace_name)s already exists."
                  % {'name': metadef_tag.name,
                     'namespace_name': namespace_name})
        raise exc.MetadefDuplicateTag(
            name=metadef_tag.name, namespace_name=namespace_name)
    return metadef_tag.to_dict()
def create_tags(context, namespace_name, tag_list, session):
    """Atomically replace all tags of a namespace with tag_list.

    Existing tags in the namespace are deleted and the supplied ones
    inserted inside a single transaction; an empty tag_list is a no-op.
    Returns the list of created tags as dicts.
    Raises MetadefDuplicateTag when the incoming list contains a
    duplicate name.
    """
    metadef_tags_list = []
    if tag_list:
        namespace = namespace_api.get(context, namespace_name, session)
        try:
            with session.begin():
                # Delete-then-insert so the namespace ends up with exactly
                # the supplied tags; the transaction rolls back on failure.
                query = (session.query(models.MetadefTag).filter_by(
                    namespace_id=namespace['id']))
                query.delete(synchronize_session='fetch')
                for value in tag_list:
                    value.update({'namespace_id': namespace['id']})
                    metadef_utils.drop_protected_attrs(
                        models.MetadefTag, value)
                    metadef_tag = models.MetadefTag()
                    metadef_tag.update(value.copy())
                    metadef_tag.save(session=session)
                    metadef_tags_list.append(metadef_tag.to_dict())
        except db_exc.DBDuplicateEntry:
            # NOTE(review): reports the name of the last tag assigned in the
            # loop; assumes the duplicate is raised by a save() above — a
            # duplicate raised before the first save would leave metadef_tag
            # unbound. TODO confirm.
            msg = ("A metadata tag name=%(name)s"
                   " in namespace=%(namespace_name)s already exists."
                   % {'name': metadef_tag.name,
                      'namespace_name': namespace_name})
            LOG.debug(msg)
            raise exc.MetadefDuplicateTag(
                name=metadef_tag.name, namespace_name=namespace_name)
    return metadef_tags_list
def get(context, namespace_name, name, session):
    """Return the named tag as a dict; raise if ns or tag not found."""
    return _get_by_name(context, namespace_name, name, session).to_dict()
def update(context, namespace_name, id, values, session):
    """Update an tag, raise if ns not found/visible or duplicate result"""
    namespace_api.get(context, namespace_name, session)
    metadata_tag = _get(context, id, session)
    metadef_utils.drop_protected_attrs(models.MetadefTag, values)
    # updated_at is maintained by the timestamp mixin on save.
    try:
        metadata_tag.update(values.copy())
        metadata_tag.save(session=session)
    except db_exc.DBDuplicateEntry:
        LOG.debug("Invalid update. It would result in a duplicate"
                  " metadata tag with same name=%(name)s"
                  " in namespace=%(namespace_name)s."
                  % {'name': values['name'],
                     'namespace_name': namespace_name})
        raise exc.MetadefDuplicateTag(
            name=values['name'], namespace_name=namespace_name)
    return metadata_tag.to_dict()
def delete(context, namespace_name, name, session):
    """Delete the named tag; raise if ns or tag not found."""
    namespace_api.get(context, namespace_name, session)
    tag_rec = _get_by_name(context, namespace_name, name, session)
    session.delete(tag_rec)
    session.flush()
    return tag_rec.to_dict()
def delete_namespace_content(context, namespace_id, session):
    """Use this def only if the ns for the id has been verified as visible"""
    query = (session.query(models.MetadefTag).filter_by(
        namespace_id=namespace_id))
    # Returns the number of tag rows removed.
    return query.delete(synchronize_session='fetch')
def delete_by_namespace_name(context, namespace_name, session):
    """Delete all tags of a namespace; raise if ns not found/visible."""
    ns = namespace_api.get(context, namespace_name, session)
    return delete_namespace_content(context, ns['id'], session)
def count(context, namespace_name, session):
    """Get the count of objects for a namespace, raise if ns not found"""
    ns = namespace_api.get(context, namespace_name, session)
    return session.query(func.count(models.MetadefTag.id))\
        .filter_by(namespace_id=ns['id'])\
        .scalar()

View File

@ -1,23 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
def drop_protected_attrs(model_class, values):
    """
    Remove, in place, every key of ``values`` that the model class lists
    in its ``__protected_attributes__`` set.
    """
    for attr in model_class.__protected_attributes__:
        values.pop(attr, None)

View File

@ -1,336 +0,0 @@
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
from sqlalchemy import BigInteger
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext import declarative
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy import Numeric
from sqlalchemy.orm import backref
from sqlalchemy.orm import composite
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text
import daisy.artifacts as ga
from daisy.common import semver_db
from daisy import i18n
from oslo_log import log as os_logging
BASE = declarative.declarative_base()
LOG = os_logging.getLogger(__name__)
_LW = i18n._LW
class ArtifactBase(models.ModelBase, models.TimestampMixin):
    """Base class for Artifact Models."""
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
    # Attributes callers may never set directly; stripped by
    # drop_protected_attrs-style helpers before updates.
    __protected_attributes__ = set([
        "created_at", "updated_at"])
    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False, onupdate=lambda: timeutils.utcnow())
    def save(self, session=None):
        """Persist this record, defaulting to the global DB session."""
        # Imported here to avoid a circular import with the db api module.
        from daisy.db.sqlalchemy import api as db_api
        super(ArtifactBase, self).save(session or db_api.get_session())
    def keys(self):
        return self.__dict__.keys()
    def values(self):
        return self.__dict__.values()
    def items(self):
        return self.__dict__.items()
    def to_dict(self):
        """Return a plain dict of column name -> value for this row."""
        d = {}
        for c in self.__table__.columns:
            d[c.name] = self[c.name]
        return d
def _parse_property_type_value(prop, show_text_properties=True):
columns = [
'int_value',
'string_value',
'bool_value',
'numeric_value']
if show_text_properties:
columns.append('text_value')
for prop_type in columns:
if getattr(prop, prop_type) is not None:
return prop_type.rpartition('_')[0], getattr(prop, prop_type)
return None, None
class Artifact(BASE, ArtifactBase):
    """Top-level artifact record with semver-composite version columns."""
    __tablename__ = 'artifacts'
    __table_args__ = (
        Index('ix_artifact_name_and_version', 'name', 'version_prefix',
              'version_suffix'),
        Index('ix_artifact_type', 'type_name', 'type_version_prefix',
              'type_version_suffix'),
        Index('ix_artifact_state', 'state'),
        Index('ix_artifact_owner', 'owner'),
        Index('ix_artifact_visibility', 'visibility'),
        {'mysql_engine': 'InnoDB'})
    # published_at/deleted_at are lifecycle timestamps managed by the DB
    # layer, so they join the protected set from the base class.
    __protected_attributes__ = ArtifactBase.__protected_attributes__.union(
        set(['published_at', 'deleted_at']))
    id = Column(String(36), primary_key=True,
                default=lambda: str(uuid.uuid4()))
    name = Column(String(255), nullable=False)
    type_name = Column(String(255), nullable=False)
    # The two *version composites are stored as three columns each and
    # reassembled into a DBVersion object by sqlalchemy's composite().
    type_version_prefix = Column(BigInteger, nullable=False)
    type_version_suffix = Column(String(255))
    type_version_meta = Column(String(255))
    type_version = composite(semver_db.DBVersion, type_version_prefix,
                             type_version_suffix, type_version_meta)
    version_prefix = Column(BigInteger, nullable=False)
    version_suffix = Column(String(255))
    version_meta = Column(String(255))
    version = composite(semver_db.DBVersion, version_prefix,
                        version_suffix, version_meta)
    description = Column(Text)
    visibility = Column(String(32), nullable=False)
    state = Column(String(32), nullable=False)
    owner = Column(String(255), nullable=False)
    published_at = Column(DateTime)
    deleted_at = Column(DateTime)
    def to_dict(self, show_level=ga.Showlevel.BASIC,
                show_text_properties=True):
        """Serialize the artifact, collapsing the composite version columns
        into string 'version'/'type_version' keys and, unless show_level is
        NONE, attaching tags, properties and blob/location details.
        """
        d = super(Artifact, self).to_dict()
        # Replace the raw composite columns with their string form.
        d.pop('type_version_prefix')
        d.pop('type_version_suffix')
        d.pop('type_version_meta')
        d.pop('version_prefix')
        d.pop('version_suffix')
        d.pop('version_meta')
        d['type_version'] = str(self.type_version)
        d['version'] = str(self.version)
        tags = []
        for tag in self.tags:
            tags.append(tag.value)
        d['tags'] = tags
        if show_level == ga.Showlevel.NONE:
            return d
        properties = {}
        # sort properties
        self.properties.sort(key=lambda elem: (elem.name, elem.position))
        for prop in self.properties:
            proptype, propvalue = _parse_property_type_value(
                prop, show_text_properties)
            if proptype is None:
                continue
            if prop.position is not None:
                # make array
                for p in properties.keys():
                    if p == prop.name:
                        # add value to array
                        properties[p]['value'].append(dict(type=proptype,
                                                           value=propvalue))
                        break
                else:
                    # create new array
                    p = dict(type='array',
                             value=[])
                    p['value'].append(dict(type=proptype,
                                           value=propvalue))
                    properties[prop.name] = p
            else:
                # make scalar
                properties[prop.name] = dict(type=proptype,
                                             value=propvalue)
        d['properties'] = properties
        blobs = {}
        # sort blobs
        self.blobs.sort(key=lambda elem: elem.position)
        for blob in self.blobs:
            locations = []
            # sort locations
            blob.locations.sort(key=lambda elem: elem.position)
            for loc in blob.locations:
                locations.append(dict(value=loc.value,
                                      status=loc.status))
            if blob.name in blobs:
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))
            else:
                blobs[blob.name] = []
                blobs[blob.name].append(dict(size=blob.size,
                                             checksum=blob.checksum,
                                             locations=locations,
                                             item_key=blob.item_key))
        d['blobs'] = blobs
        return d
class ArtifactDependency(BASE, ArtifactBase):
    """Directed dependency edge between two artifacts.

    source depends on dest; origin records the artifact that introduced
    the (possibly transitive) edge, and is_direct distinguishes direct
    from transitive dependencies.
    """
    __tablename__ = 'artifact_dependencies'
    __table_args__ = (Index('ix_artifact_dependencies_source_id',
                            'artifact_source'),
                      Index('ix_artifact_dependencies_origin_id',
                            'artifact_origin'),
                      Index('ix_artifact_dependencies_dest_id',
                            'artifact_dest'),
                      Index('ix_artifact_dependencies_direct_dependencies',
                            'artifact_source', 'is_direct'),
                      {'mysql_engine': 'InnoDB'})
    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_source = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    artifact_dest = Column(String(36), ForeignKey('artifacts.id'),
                           nullable=False)
    artifact_origin = Column(String(36), ForeignKey('artifacts.id'),
                             nullable=False)
    is_direct = Column(Boolean, nullable=False)
    position = Column(Integer)
    name = Column(String(36))
    # Deleting a source artifact cascades to its dependency rows.
    source = relationship('Artifact',
                          backref=backref('dependencies', cascade="all, "
                                                                  "delete"),
                          foreign_keys="ArtifactDependency.artifact_source")
    dest = relationship('Artifact',
                        foreign_keys="ArtifactDependency.artifact_dest")
    origin = relationship('Artifact',
                          foreign_keys="ArtifactDependency.artifact_origin")
class ArtifactTag(BASE, ArtifactBase):
    """Free-form tag value attached to an artifact."""
    __tablename__ = 'artifact_tags'
    __table_args__ = (Index('ix_artifact_tags_artifact_id', 'artifact_id'),
                      Index('ix_artifact_tags_artifact_id_tag_value',
                            'artifact_id', 'value'),
                      {'mysql_engine': 'InnoDB'},)
    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    # Tags are owned by their artifact and removed with it.
    artifact = relationship(Artifact,
                            backref=backref('tags',
                                            cascade="all, delete-orphan"))
    value = Column(String(255), nullable=False)
class ArtifactProperty(BASE, ArtifactBase):
    """Typed property of an artifact.

    Exactly one of the *_value columns is expected to be non-null; see
    _parse_property_type_value. position orders array-valued properties.
    """
    __tablename__ = 'artifact_properties'
    __table_args__ = (
        Index('ix_artifact_properties_artifact_id', 'artifact_id'),
        Index('ix_artifact_properties_name', 'name'),
        {'mysql_engine': 'InnoDB'},)
    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    # Properties are owned by their artifact and removed with it.
    artifact = relationship(Artifact,
                            backref=backref('properties',
                                            cascade="all, delete-orphan"))
    name = Column(String(255), nullable=False)
    string_value = Column(String(255))
    int_value = Column(Integer)
    numeric_value = Column(Numeric)
    bool_value = Column(Boolean)
    text_value = Column(Text)
    position = Column(Integer)
class ArtifactBlob(BASE, ArtifactBase):
    """Binary payload metadata for an artifact (size/checksum, not bytes).

    The actual data lives at the ArtifactBlobLocation rows; position
    orders multiple blobs sharing a name.
    """
    __tablename__ = 'artifact_blobs'
    __table_args__ = (
        Index('ix_artifact_blobs_artifact_id', 'artifact_id'),
        Index('ix_artifact_blobs_name', 'name'),
        {'mysql_engine': 'InnoDB'},)
    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    artifact_id = Column(String(36), ForeignKey('artifacts.id'),
                         nullable=False)
    name = Column(String(255), nullable=False)
    item_key = Column(String(329))
    size = Column(BigInteger(), nullable=False)
    checksum = Column(String(32))
    position = Column(Integer)
    # Blobs are owned by their artifact and removed with it.
    artifact = relationship(Artifact,
                            backref=backref('blobs',
                                            cascade="all, delete-orphan"))
class ArtifactBlobLocation(BASE, ArtifactBase):
    """Storage location (URI value + status) of an artifact blob."""
    __tablename__ = 'artifact_blob_locations'
    __table_args__ = (Index('ix_artifact_blob_locations_blob_id',
                            'blob_id'),
                      {'mysql_engine': 'InnoDB'})
    id = Column(String(36), primary_key=True, nullable=False,
                default=lambda: str(uuid.uuid4()))
    blob_id = Column(String(36), ForeignKey('artifact_blobs.id'),
                     nullable=False)
    value = Column(Text, nullable=False)
    position = Column(Integer)
    status = Column(String(36), default='active', nullable=True)
    # Locations are owned by their blob and removed with it.
    blob = relationship(ArtifactBlob,
                        backref=backref('locations',
                                        cascade="all, delete-orphan"))
def register_models(engine):
    """Create database tables for all models with the given engine."""
    for model in (Artifact, ArtifactTag, ArtifactProperty,
                  ArtifactBlob, ArtifactBlobLocation, ArtifactDependency):
        model.metadata.create_all(engine)
def unregister_models(engine):
    """Drop database tables for all models with the given engine."""
    # Dropped in reverse dependency order (children before Artifact).
    for model in (ArtifactDependency, ArtifactBlobLocation, ArtifactBlob,
                  ArtifactProperty, ArtifactTag, Artifact):
        model.metadata.drop_all(engine)

View File

@ -1,168 +0,0 @@
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for glance metadata schema
"""
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
from sqlalchemy import Boolean
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import Integer
from sqlalchemy.orm import relationship
from sqlalchemy import String
from sqlalchemy import Text
from daisy.db.sqlalchemy.models import JSONEncodedDict
class DictionaryBase(models.ModelBase):
    """Model base class that can serialize a row to a plain ``dict``."""

    metadata = None

    def to_dict(self):
        """Return a ``{column_name: value}`` mapping for this row."""
        return {column.name: self[column.name]
                for column in self.__table__.columns}
# Declarative base whose models gain ``to_dict`` via DictionaryBase.
BASE_DICT = declarative_base(cls=DictionaryBase)
class DaisyMetadefBase(models.TimestampMixin):
    """Base class for Daisy metadef models.

    Supplies the InnoDB table args and the ``created_at``/``updated_at``
    timestamp columns shared by all metadef tables.
    """
    __table_args__ = {'mysql_engine': 'InnoDB'}
    __table_initialized__ = False
    # Timestamps are managed by the model layer; callers may not set them.
    __protected_attributes__ = set(["created_at", "updated_at"])

    created_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=False)
    # TODO(wko): Column `updated_at` have no default value in
    #            openstack common code. We should decide, is this value
    #            required and make changes in oslo (if required) or
    #            in glance (if not).
    updated_at = Column(DateTime, default=lambda: timeutils.utcnow(),
                        nullable=True, onupdate=lambda: timeutils.utcnow())
class MetadefNamespace(BASE_DICT, DaisyMetadefBase):
    """Represents a metadata-schema namespace in the datastore."""
    __tablename__ = 'metadef_namespaces'
    __table_args__ = (Index('ix_metadef_namespaces_namespace', 'namespace'),
                      Index('ix_metadef_namespaces_owner', 'owner'))

    id = Column(Integer, primary_key=True, nullable=False)
    # Unique-looking namespace identifier string (indexed above).
    namespace = Column(String(80), nullable=False)
    display_name = Column(String(80))
    description = Column(Text())
    # NOTE(review): presumably 'public'/'private' — confirm against the
    # API layer; no constraint is enforced at the schema level here.
    visibility = Column(String(32))
    # When true the namespace may not be deleted/modified by normal users.
    protected = Column(Boolean)
    owner = Column(String(255), nullable=False)
class MetadefObject(BASE_DICT, DaisyMetadefBase):
    """Represents a metadata-schema object in the datastore."""
    __tablename__ = 'metadef_objects'
    __table_args__ = (Index('ix_metadef_objects_namespace_id', 'namespace_id'),
                      Index('ix_metadef_objects_name', 'name'))

    id = Column(Integer, primary_key=True, nullable=False)
    # Owning namespace; every object belongs to exactly one namespace.
    namespace_id = Column(Integer(), ForeignKey('metadef_namespaces.id'),
                          nullable=False)
    name = Column(String(80), nullable=False)
    description = Column(Text())
    # NOTE(review): presumably a serialized list of required property
    # names — confirm against the code that populates it.
    required = Column(Text())
    # JSON schema for the object, serialized via JSONEncodedDict.
    json_schema = Column(JSONEncodedDict(), default={}, nullable=False)
class MetadefProperty(BASE_DICT, DaisyMetadefBase):
    """Represents a metadata-schema namespace-property in the datastore."""
    __tablename__ = 'metadef_properties'
    __table_args__ = (Index('ix_metadef_properties_namespace_id',
                            'namespace_id'),
                      Index('ix_metadef_properties_name', 'name'))

    id = Column(Integer, primary_key=True, nullable=False)
    # Owning namespace; every property belongs to exactly one namespace.
    namespace_id = Column(Integer(), ForeignKey('metadef_namespaces.id'),
                          nullable=False)
    name = Column(String(80), nullable=False)
    # JSON schema for the property, serialized via JSONEncodedDict.
    json_schema = Column(JSONEncodedDict(), default={}, nullable=False)
class MetadefNamespaceResourceType(BASE_DICT, DaisyMetadefBase):
    """Association between a namespace and a resource type.

    Composite primary key of (resource_type_id, namespace_id); also
    carries per-association prefix/target metadata.
    """
    __tablename__ = 'metadef_namespace_resource_types'
    __table_args__ = (Index('ix_metadef_ns_res_types_res_type_id_ns_id',
                            'resource_type_id', 'namespace_id'),
                      Index('ix_metadef_ns_res_types_namespace_id',
                            'namespace_id'))

    resource_type_id = Column(Integer,
                              ForeignKey('metadef_resource_types.id'),
                              primary_key=True, nullable=False)
    namespace_id = Column(Integer, ForeignKey('metadef_namespaces.id'),
                          primary_key=True, nullable=False)
    # NOTE(review): presumably which part of the resource the properties
    # apply to — confirm against the API layer.
    properties_target = Column(String(80))
    # Optional prefix prepended to property names for this resource type.
    prefix = Column(String(80))
class MetadefResourceType(BASE_DICT, DaisyMetadefBase):
    """Represents a metadata-schema resource type in the datastore."""
    __tablename__ = 'metadef_resource_types'
    __table_args__ = (Index('ix_metadef_resource_types_name', 'name'), )

    id = Column(Integer, primary_key=True, nullable=False)
    name = Column(String(80), nullable=False)
    # When true the resource type may not be deleted by normal users.
    protected = Column(Boolean, nullable=False, default=False)
    # Namespace associations; joined explicitly on the association table's
    # resource_type_id since no relationship is declared on the other side.
    associations = relationship(
        "MetadefNamespaceResourceType",
        primaryjoin=id == MetadefNamespaceResourceType.resource_type_id)
class MetadefTag(BASE_DICT, DaisyMetadefBase):
    """Represents a metadata-schema tag in the data store."""
    __tablename__ = 'metadef_tags'
    __table_args__ = (Index('ix_metadef_tags_namespace_id',
                            'namespace_id', 'name'),
                      Index('ix_metadef_tags_name', 'name'))

    id = Column(Integer, primary_key=True, nullable=False)
    # Owning namespace; every tag belongs to exactly one namespace.
    namespace_id = Column(Integer(), ForeignKey('metadef_namespaces.id'),
                          nullable=False)
    name = Column(String(80), nullable=False)
def register_models(engine):
    """Create the database tables for every metadef model.

    :param engine: SQLAlchemy engine the DDL is issued against.
    """
    metadef_models = [MetadefNamespace, MetadefObject, MetadefProperty,
                      MetadefTag,
                      MetadefResourceType, MetadefNamespaceResourceType]
    for mapped in metadef_models:
        mapped.metadata.create_all(engine)
def unregister_models(engine):
    """Drop the database tables for every metadef model.

    Models are listed so dependent tables are dropped before the
    tables they reference.

    :param engine: SQLAlchemy engine the DDL is issued against.
    """
    metadef_models = [MetadefObject, MetadefProperty,
                      MetadefNamespaceResourceType, MetadefTag,
                      MetadefNamespace, MetadefResourceType]
    for mapped in metadef_models:
        mapped.metadata.drop_all(engine)

View File

@ -1,6 +1,5 @@
[metadata]
name = daisy
version = 2015.1
summary = OpenStack Image Service
description-file =
README.rst
@ -64,11 +63,6 @@ all_files = 1
build-dir = doc/build
source-dir = doc/source
[egg_info]
tag_build =
tag_date = 0
tag_svn_revision = 0
[compile_catalog]
directory = daisy/locale
domain = daisy