Removing json files not needed anymore
commit 184b211af0
parent 2483eaa8a8
@@ -1,28 +0,0 @@
import json
import os
import sys


def clean_file(name):
    with open(name, "r") as f:
        contents = f.read()
    lines = contents.splitlines()
    cleaned_up = list()
    for line in lines:
        if line.lstrip().startswith('#'):
            continue
        else:
            cleaned_up.append(line)
    cleaned_lines = os.linesep.join(cleaned_up)
    data = json.loads(cleaned_lines)
    output = json.dumps(data, indent=4, sort_keys=True)
    print(output)


if __name__ == "__main__":
    ME = os.path.basename(sys.argv[0])
    if len(sys.argv) == 1:
        print("%s filename filename filename..." % (ME))
        sys.exit(0)
    clean_file(sys.argv[1])
    sys.exit(0)
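For reference, a minimal sketch of what this removed pretty-printer did, reproduced on an inline sample instead of a file on disk (the sample data below is illustrative, not from the repository):

import json
import os

sample = os.linesep.join([
    '# comment lines like this were stripped before parsing',
    '{"b": 1, "a": {"c": 2}}',
])
cleaned = os.linesep.join(
    line for line in sample.splitlines()
    if not line.lstrip().startswith('#')
)
print(json.dumps(json.loads(cleaned), indent=4, sort_keys=True))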
@@ -1,31 +0,0 @@
#!/usr/bin/env python

import glob
import json
import os
import sys


def load_json(fn):
    with open(fn, 'r') as f:
        lines = f.readlines()
    data = os.linesep.join(
        l
        for l in lines
        if not l.lstrip().startswith('#')
    )
    return json.loads(data)

inputdir = sys.argv[1]
distro = sys.argv[2]

for input_file in glob.glob('%s/*.json' % inputdir):
    data = load_json(input_file)

    print
    print ' - name: %s' % os.path.splitext(os.path.basename(input_file))[0]
    print ' packages:'
    for pkg, info in sorted(data.get(distro, {}).items()):
        print ' - name: %s' % pkg
        for n, v in sorted(info.items()):
            print ' %s: %s' % (n, v)
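The loop at the bottom of this removed generator turned those commented JSON files into a YAML-style package listing for one distro. A rough Python 3 rendering of that formatting step with made-up sample data (the field names and the output indentation are assumptions; the original is a Python 2 script):

data = {'ubuntu': {'curl': {'version': '7.58.0'}, 'git': {}}}  # illustrative sample
distro = 'ubuntu'
name = 'example'  # stem of a hypothetical input file, example.json
print()
print(' - name: %s' % name)
print('   packages:')
for pkg, info in sorted(data.get(distro, {}).items()):
    print('   - name: %s' % pkg)
    for n, v in sorted(info.items()):
        print('     %s: %s' % (n, v))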
@@ -1,126 +0,0 @@
"""
Searches the given path for JSON files, and validates their contents.

"""

import errno
import json
import logging
import optparse
import os
import re


# Configure logging
logging.basicConfig(format='%(levelname)s: %(message)s')
ROOT_LOGGER = logging.getLogger("")
ROOT_LOGGER.setLevel(logging.WARNING)
LOGGER = logging.getLogger(__name__)

# Configure commandlineability
parser = optparse.OptionParser()
parser.add_option('-p', type="string", default=os.getcwd(),
                  help='the path to search for JSON files', dest='path')
parser.add_option('-r', type="string", default='.json$',
                  help='the regular expression to match filenames against ' \
                       '(not absolute paths)', dest='regexp')
(args, _) = parser.parse_args()


def main():
    files = find_matching_files(args.path, args.regexp)
    results = True
    print("Validating %s json files (found using regex [%s] in path [%s])" % (len(files), args.regexp, args.path))
    for path in files:
        pres = validate_json(path)
        if not pres:
            print("Failed at validating [%s]" % (path))
            results = False
        else:
            print("Validated [%s]" % (path))
    # Invert our test results to produce a status code
    if results:
        exit(0)
    else:
        exit(1)


def validate_json(path):
    """Open a file and validate its contents as JSON"""
    try:
        LOGGER.info("Validating %s" % (path))
        contents = read_file(path)
        if contents is False:
            logging.warning('Insufficient permissions to open: %s' % path)
            return False
    except:
        LOGGER.warning('Unable to open: %s' % path)
        return False

    # knock off comments
    ncontents = list()
    for line in contents.splitlines():
        tmp_line = line.strip()
        if tmp_line.startswith("#"):
            continue
        else:
            ncontents.append(line)

    contents = os.linesep.join(ncontents)
    try:
        jdict = json.loads(contents)
        if not type(jdict) is dict:
            LOGGER.error('Root element in %s is not a dictionary!' % path)
            return False
    except Exception:
        LOGGER.exception('Unable to parse: %s' % path)
        return False

    return True


def find_matching_files(path, pattern):
    """Search the given path for files matching the given pattern"""

    regex = re.compile(pattern)
    json_files = []
    for root, dirs, files in os.walk(path):
        for name in files:
            if regex.search(name):
                full_name = os.path.join(root, name)
                json_files.append(full_name)
    return json_files


def read_file(path):
    """Attempt to read a file safely

    Returns the contents of the file as a string on success, False otherwise"""
    try:
        fp = open(path)
    except IOError as e:
        if e.errno == errno.EACCES:
            # permission error
            return False
        raise
    else:
        with fp:
            return fp.read()


def replace_file(path, new_contents):
    """Overwrite the file at the given path with the new contents

    Returns True on success, False otherwise."""
    try:
        f = open(path, 'w')
        f.write(new_contents)
        f.close()
    except:
        LOGGER.error('Unable to write: %s' % f)
        return False
    return True


if __name__ == "__main__":
    main()
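A self-contained sketch of the core check this removed validator performed on each file: strip '#' comment lines, parse the remainder as JSON, and require the root element to be a dictionary (the temporary file and its contents are illustrative):

import json
import os
import tempfile

# Write an illustrative commented-JSON file, then validate it the same way
# the removed script did.
with tempfile.NamedTemporaryFile('w', suffix='.json', delete=False) as tmp:
    tmp.write('# a comment\n{"key": "value"}\n')
    path = tmp.name

with open(path) as f:
    lines = [l for l in f.read().splitlines() if not l.strip().startswith('#')]
ok = isinstance(json.loads(os.linesep.join(lines)), dict)
print('Validated [%s]' % path if ok else 'Failed at validating [%s]' % path)
os.unlink(path)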