# -*- encoding: utf-8 -*-
# Copyright (C) 2012-2014 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Roman Rakus
#
"""
Base class and utilities for all OpenLMI Account tests.
"""

import hashlib
import os
import tempfile
import string
import subprocess
from collections import Counter
from collections import OrderedDict

import methods

from lmi.test import lmibase


class AccountBase(lmibase.LmiTestCase):
    """
    Base class for all LMI Account tests.
    """

    USE_EXCEPTIONS = True

    @classmethod
    def setUpClass(cls):
        # Python 2 idiom: call the parent setUpClass on this subclass
        lmibase.LmiTestCase.setUpClass.im_func(cls)
        cls.user_name = os.environ.get("LMI_ACCOUNT_USER")
        cls.group_name = os.environ.get("LMI_ACCOUNT_GROUP")


## .........................................................................
##
## Validators
## '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
##

class PasswdFile():
    """
    Parse /etc/passwd and perform basic heuristics to assess validity.

    "Heuristics" means it is fine to check here for what is considered
    "normal" or "expected" rather than what is strictly valid/invalid.
    For example, you may treat UID != GID as "not normal", even though
    depending on what you did it could be perfectly OK.  OTOH, keep in
    mind that more specific checks belong in the test itself.

    See the illustrative sketch following this class for typical usage.
    """

    DEFAULT_OPTIONS = {
        'username_prefix': 'user',
        'unique': [
            "name",
            "uid",
        ]
    }

    def __init__(self, options=None):
        # copy the defaults so that per-instance updates do not modify
        # the class-level dict
        self.options = dict(self.__class__.DEFAULT_OPTIONS)
        if options is not None:
            self.options.update(options)
        self.users = []
        with open('/etc/passwd') as pf:
            lines = pf.readlines()
        self.fulltext = "".join(lines)
        for line in lines:
            # strip the trailing newline so the last field (shell) is clean
            fields = line.rstrip("\n").split(":")
            user = {
                "name": fields[0],
                "password": fields[1],
                "uid": fields[2],
                "gid": fields[3],
                "gecos": fields[4],
                "directory": fields[5],
                "shell": fields[6],
            }
            if user['name'].startswith(self.options['username_prefix']):
                self.users.append(user)

    def find_dups(self):
        """
        Find duplicates in fields that should be unique
        """
        dups = Counter()
        for field in self.options['unique']:
            if not methods.field_is_unique(field, self.users):
                dups[field] += 1
        return dict(dups)

    def get_errors(self):
        """
        Get dict of detected errors (currently only duplicates).
        """
        errlist = {}
        dups = self.find_dups()
        if dups:
            errlist['duplicates'] = dups
        return errlist

    def get_names(self):
        """
        Get list of user names
        """
        return [u['name'] for u in self.users]
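
# Illustrative sketch (not part of the original module): one way a test
# might combine PasswdFile's checks after creating accounts.  The "user_"
# prefix and the assertion message are assumptions made for the example
# only.
def _example_passwd_check():
    """Return names of test users after checking /etc/passwd consistency."""
    passwd = PasswdFile(options={'username_prefix': 'user_'})
    errors = passwd.get_errors()
    assert not errors, "unexpected /etc/passwd inconsistencies: %r" % errors
    return passwd.get_names()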
""" def __init__(self): self.root = tempfile.mkdtemp(prefix=self.__class__.__name__ + ".") self.backups = OrderedDict() subprocess.check_call(["mkdir", "-p", self.root]) def _copy(self, src, dest): """ Copy src to dst --- force, keep meta, no questions asked """ subprocess.check_call(["cp", "-a", "-f", src, dest]) def _get_bpath(self, path): """ Take original path and return path to backup. """ if not path.startswith("/"): raise ValueError("only absolute paths are supported") digest = hashlib.sha1(path).hexdigest() return self.root + "/" + digest def _update_index(self): """ Create/update an index file to help in case of backup investigation For convenience, index file is sorted by real path. """ paths = sorted(self.backups.keys()) with open(self.root + "/index", "w+") as fh: for path in paths: fh.write("%s %s\n" % (self.backups[path], path)) def add_files(self, paths): """ Add list of tiles to backup storage """ for path in paths: self.add_file(path) def add_file(self, path): """ Add a file to backup storage """ bpath = self._get_bpath(path) self._copy(path, bpath) self.backups[path] = bpath self._update_index() def restore(self, path): """ Restore particular path """ try: self._copy(self.backups[path], path) except KeyError: raise ValueError("path not stored: %s" % path) def restore_all(self): """ Restore all stored paths in same order as they were stored """ for key in self.backups.keys(): self.restore(key) def destroy_backup(self): """ Destroy the temporary backup """ subprocess.call(["rm", "-rf", self.root]) class BaseCrippler: """ Helper class for crippling system files. To use the class, you need to sub-class it and implement _define_cases method. """ LINE_LENGTH = 500 LINE_COUNT = 50 BINARY_LENGTH = 10 * 1024 * 1024 ## virtual # def _define_cases(self): """ Define cases per file supported This function must return a dict with one set of cases per file: key is path and value is another dict defining cases as pairs of name ([a-zA-Z_]) and content. Quick example: { '/etc/file1': { 'case1': "some triggering content", 'case2': "some other triggering content", 'case3': "some funny triggering content", }, '/etc/file2': { 'case1': "some triggering content", 'case2': "some other triggering content", 'case3': "some funny triggering content", }, } Note that trailing newline is added automatically to each content string. Also, whether content will be appended or replaced is decided by caller of the BaseCrippler.cripple method. 
""" pass ## internal # def __init__(self): self.autocases = { 'empty': lambda: '', 'random_line': self._random_line, 'random_lines': self._random_lines, 'random_binary': self._random_binary, } self.cases = self._define_cases() def _append_to(self, path, content): with open(path, 'a+') as fh: fh.write(content) def _clobber(self, path, content): with open(path, 'w+') as fh: fh.write(content) def _random_binary(self, size=BINARY_LENGTH): chars = ''.join([chr(i) for i in xrange(256)]) return methods.random_string(strength=size, chars=chars) def _random_line(self, size=LINE_LENGTH): chars = string.letters + string.punctuation + " \t" return methods.random_string(strength=size, chars=chars) + "\n" def _random_lines(self, size=LINE_LENGTH, count=LINE_COUNT): return "".join([self._random_line(size) for i in xrange(count)]) def _get_content(self, path, case): try: content = self.autocases[case]() except KeyError: try: content = self.cases[path][case] + "\n" except KeyError: raise ValueError("unknown case: %s for: %s" % (case, path)) return content ## public # def all_cases_for(self, path): """ Return list of cases available for path """ return self.cases[path].keys() + self.autocases.keys() def all_paths(self): """ Return list of paths served by this implementation """ return self.cases.keys() def cripple(self, path, case, op="replace"): """ Cripple file according to selected case. op is either "append" or "replace" and means that the content will be appended to the file, otherwise it will replace it. """ if op == 'replace': self._clobber(path, self._get_content(path, case)) elif op == 'append': self._append_to(path, self._get_content(path, case)) else: raise ValueError("unknown op: %s" % op)