Implement server-side caching.

To start with, place the server package cache in the muranorepository/api
directory (this path can easily be obtained via v1_api.root_path).

Change-Id: I7acbb174491f153eb340efb92ed3b0ce4c5f840f
Implements-feature: MRN-1149
parent b6613f9ecb
commit d121a41953
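The intended request flow, seen from a consumer of the API (a hedged sketch, not part of this commit; fetch_archive, base_url, cached_hash, and out_path are illustrative names, and the server side is the route change below):

import requests

def fetch_archive(base_url, client_type, cached_hash, out_path):
    # Send the SHA-1 of the locally cached archive; the server answers
    # 304 when its archive matches, or streams a fresh data.tar.gz.
    resp = requests.get('{0}/client/{1}'.format(base_url, client_type),
                        params={'hash': cached_hash}, stream=True)
    if resp.status_code == 304:
        return out_path  # local copy is already current
    with open(out_path, 'wb') as f:
        for chunk in resp.iter_content(1 << 20):
            f.write(chunk)
    return out_path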
First changed file (the module defining the v1_api blueprint):

@@ -16,6 +16,7 @@ import os
 from flask import Blueprint, send_file
 from flask import jsonify, request, abort
+from flask import make_response
 from werkzeug import secure_filename
 
 from muranorepository.utils.parser import ManifestParser
@@ -25,20 +26,23 @@ from oslo.config import cfg
 CONF = cfg.CONF
 
 v1_api = Blueprint('v1', __name__)
+CACHE_DIR = os.path.join(v1_api.root_path, 'cache')
+if not os.path.exists(CACHE_DIR):
+    os.mkdir(CACHE_DIR)
 
 
-def _get_archive(client):
+def _get_archive(client, hash_sum):
     parser = ManifestParser(CONF.manifests)
     manifests = parser.parse()
+    types = None
     if client == 'conductor':
-        return Archiver().create(manifests,
-                                 'heat',
-                                 'agent',
-                                 'scripts')
+        types = ('heat', 'agent', 'scripts')
     elif client == 'ui':
-        return Archiver().create(manifests, client)
+        types = ('ui',)
     else:
         abort(404)
+    return Archiver().create(client, CACHE_DIR, manifests, hash_sum, types)
 
 
 def _get_locations(data_type, result_path):
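For orientation, the full path an archive ends up at under this layout (illustrative sketch; hash_sum stands for the SHA-1 hex digest the Archiver computes below):

import os

# CACHE_DIR / <client type> / <archive SHA-1> / data.tar.gz
archive_path = os.path.join(v1_api.root_path, 'cache',  # CACHE_DIR
                            'conductor',                # per-client subdir
                            hash_sum,                   # per-hash subdir
                            'data.tar.gz')              # ARCHIVE_PKG_NAME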
@@ -83,8 +87,11 @@ def _check_data_type(data_type):
 
 @v1_api.route('/client/<path:type>')
 def get_archive_data(type):
-    return send_file(_get_archive(type),
-                     mimetype='application/octet-stream')
+    path = _get_archive(type, request.args.get('hash'))
+    if path:
+        return send_file(path, mimetype='application/octet-stream')
+    else:
+        return make_response(('Not modified', 304))
 
 
 @v1_api.route('/admin/<data_type>')
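Both branches of the new handler can be exercised with Flask's test client (a sketch under assumptions: app is a Flask application with the v1 blueprint registered, and known_hash equals the hash of the archive currently cached on the server; both names are hypothetical):

with app.test_client() as c:
    fresh = c.get('/client/ui')  # no hash supplied: full 200 response
    assert fresh.status_code == 200
    cached = c.get('/client/ui', query_string={'hash': known_hash})
    assert cached.status_code == 304  # matching hash: not modified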
Second changed file (the module defining the Archiver class):

@@ -15,11 +15,15 @@ import os
 import tarfile
 import tempfile
 import shutil
+import hashlib
 import logging as log
 from oslo.config import cfg
 from muranorepository.consts import DATA_TYPES
 CONF = cfg.CONF
 
+ARCHIVE_PKG_NAME = 'data.tar.gz'
+CHUNK_SIZE = 1 << 20  # 1MB
+
 
 class Archiver(object):
     def _copy_data(self, file_lists, src, dst):
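CHUNK_SIZE is 1 << 20 bytes (1 MiB), so hashing reads the archive in fixed-size pieces instead of loading it whole; the same chunked-read idiom as a standalone sketch (assumes a data.tar.gz in the working directory):

import hashlib

sha1 = hashlib.sha1()
with open('data.tar.gz', 'rb') as f:
    # Each 1 MiB chunk feeds the running digest; memory use stays flat.
    for buf in iter(lambda: f.read(1 << 20), b''):
        sha1.update(buf)
print(sha1.hexdigest())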
@@ -39,18 +43,70 @@ class Archiver(object):
                 log.error("Unable to copy file "
                           "{0}".format(file))
 
-    def _compose_archive(self, path):
-        target_archive = "data.tar.gz"
-        with tarfile.open(target_archive, "w:gz") as tar:
+    def _get_hash(self, archive_path):
+        """Calculate the SHA-1 hash of an archive file.
+
+        SHA-1 takes a bit more time than MD5
+        (see http://tinyurl.com/kpj5jy7), but is more secure.
+        """
+        # Copy-pasted from muranodashboard/panel/services/metadata.py
+        if os.path.exists(archive_path):
+            sha1 = hashlib.sha1()
+            # Read in binary mode so hashing is byte-exact on any platform.
+            with open(archive_path, 'rb') as f:
+                buf = f.read(CHUNK_SIZE)
+                while buf:
+                    sha1.update(buf)
+                    buf = f.read(CHUNK_SIZE)
+            hsum = sha1.hexdigest()
+            log.debug("Archive '{0}' has hash-sum {1}".format(
+                archive_path, hsum))
+            return hsum
+        else:
+            log.info(
+                "Archive '{0}' doesn't exist, no hash to calculate".format(
+                    archive_path))
+            return None
+
+    def _compose_archive(self, path, cache_dir):
+        with tarfile.open(ARCHIVE_PKG_NAME, "w:gz") as tar:
             for item in os.listdir(path):
                 tar.add(os.path.join(path, item), item)
         try:
             shutil.rmtree(path, ignore_errors=True)
         except Exception as e:
             log.error("Unable to delete temp directory: {0}".format(e))
-        return os.path.abspath(target_archive)
+        hash_sum = self._get_hash(ARCHIVE_PKG_NAME)
+        pkg_dir = os.path.join(cache_dir, hash_sum)
+        os.mkdir(pkg_dir)
+        shutil.move(ARCHIVE_PKG_NAME, os.path.join(pkg_dir, ARCHIVE_PKG_NAME))
+        return os.path.abspath(os.path.join(pkg_dir, ARCHIVE_PKG_NAME))
 
-    def create(self, manifests, *types):
+    def _is_data_cached(self, cache_dir, hash_sum):
+        existing_caches = os.listdir(cache_dir)
+        if len(existing_caches) == 1:
+            if existing_caches[0] == hash_sum:
+                path = os.path.join(cache_dir, hash_sum, ARCHIVE_PKG_NAME)
+                if not os.path.exists(path):
+                    raise RuntimeError(
+                        'Archive package is missing at dir {0}'.format(
+                            os.path.join(cache_dir, hash_sum)))
+                log.debug('Archive package already exists at {0} and it '
+                          'matches hash-sum {1}.'.format(path, hash_sum))
+                return True
+            else:
+                path = os.path.join(cache_dir, existing_caches[0])
+                log.info('Archive package already exists at {0}, but it '
+                         "doesn't match requested hash-sum {1}. "
+                         'Deleting it.'.format(path, hash_sum))
+                shutil.rmtree(path)
+                return False
+        elif len(existing_caches) == 0:
+            return False
+        else:
+            raise RuntimeError('Too many cached archives at {0}'.format(
+                cache_dir))
+
+    def create(self, client_type, cache_root, manifests, hash_sum, types):
         """
         manifests -- list of Manifest objects
-        *types - desired data types to be added to archive
+        types -- desired data types to be added to the archive
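Since _compose_archive() names each cache subdirectory after the archive's own SHA-1, the cache is self-describing and cheap to verify; a hypothetical helper along those lines (verify_cached is not part of this commit):

import hashlib
import os

def verify_cached(pkg_dir):
    # The directory name doubles as the expected SHA-1 of its archive.
    archive = os.path.join(pkg_dir, 'data.tar.gz')
    sha1 = hashlib.sha1()
    with open(archive, 'rb') as f:
        for buf in iter(lambda: f.read(1 << 20), b''):
            sha1.update(buf)
    return sha1.hexdigest() == os.path.basename(pkg_dir)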
@@ -62,6 +118,14 @@ class Archiver(object):
             temp_dir = tempfile.mkdtemp()
         except:
             temp_dir = '/tmp'
+
+        cache_dir = os.path.join(cache_root, client_type)
+        if not os.path.exists(cache_dir):
+            os.mkdir(cache_dir)
+
+        if self._is_data_cached(cache_dir, hash_sum):
+            return None
+
         for data_type in types:
             if data_type not in DATA_TYPES:
                 raise Exception("Please, specify one of the supported data "
@@ -84,4 +148,4 @@ class Archiver(object):
                 "Manifest for {0} service has no file definitions for "
                 "{1}".format(manifest.service_display_name, data_type))
 
-        return self._compose_archive(temp_dir)
+        return self._compose_archive(temp_dir, cache_dir)
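Taken together, the new contract between the API layer and Archiver.create() (a summary sketch; manifests and client_hash stand in for values produced at request time):

archiver = Archiver()
path = archiver.create('ui', CACHE_DIR, manifests, client_hash, ('ui',))
if path is None:
    pass  # cached archive already matches client_hash: answer 304
else:
    pass  # fresh archive composed and cached: send_file(path)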