Refactor SETools queries/analyses to use descriptors instead of get/setters

This is Pythonic.
Author: Chris PeBenito  2015-05-16 21:59:10 -04:00
parent ab3772843e
commit e6f59d04e5
40 changed files with 1458 additions and 2574 deletions
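Before the per-file diffs, a minimal sketch of the pattern this commit adopts. The Criteria and ToyQuery names below are hypothetical, not SETools classes; the sketch only shows why descriptor-backed attributes replace the old set_*() methods, so callers can write plain assignments such as q.name = ... as seen throughout the diffs.

# Hypothetical sketch of the descriptor-based criteria pattern (assumed names).
import re
from weakref import WeakKeyDictionary


class Criteria(object):
    """Store a per-instance criterion; compile it as a regex when the
    owner's *_regex flag is true at assignment time."""

    def __init__(self, regex_flag):
        self.regex_flag = regex_flag
        self.instances = WeakKeyDictionary()   # per-owner storage

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return self.instances.get(obj)

    def __set__(self, obj, value):
        if not value:
            self.instances[obj] = None
        elif getattr(obj, self.regex_flag, False):
            self.instances[obj] = re.compile(value)
        else:
            self.instances[obj] = value


class ToyQuery(object):
    name = Criteria("name_regex")
    name_regex = False


q = ToyQuery()
q.name_regex = True    # set the flag before the criterion; the descriptor reads it on assignment
q.name = "^httpd_"     # stored as a compiled regex
print(q.name.pattern)  # -> ^httpd_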

seinfo (47 changed lines)

@@ -101,88 +101,84 @@ try:
    if args.boolquery or args.all:
        q = setools.BoolQuery(p)
        if isinstance(args.boolquery, str):
-           q.set_name(args.boolquery)
+           q.name = args.boolquery
        components.append(("Booleans", q, lambda x: x.statement()))
    if args.mlscatsquery or args.all:
        q = setools.CategoryQuery(p)
        if isinstance(args.mlscatsquery, str):
-           q.set_name(args.mlscatsquery)
+           q.name = args.mlscatsquery
        components.append(("Categories", q, lambda x: x.statement()))
    if args.classquery or args.all:
        q = setools.ObjClassQuery(p)
        if isinstance(args.classquery, str):
-           q.set_name(args.classquery)
+           q.name = args.classquery
        components.append(("Classes", q, lambda x: x.statement()))
    if args.commonquery or args.all:
        q = setools.CommonQuery(p)
        if isinstance(args.commonquery, str):
-           q.set_name(args.commonquery)
+           q.name = args.commonquery
        components.append(("Commons", q, lambda x: x.statement()))
    if args.constraintquery or args.all:
        q = setools.ConstraintQuery(p, ruletype=["constrain", "mlsconstrain"])
        if isinstance(args.constraintquery, str):
-           # pylint: disable=no-member
-           q.set_tclass(args.constraintquery)
+           q.tclass = [args.constraintquery]
        components.append(("Constraints", q, lambda x: x.statement()))
    if args.fsusequery or args.all:
        q = setools.FSUseQuery(p)
        if isinstance(args.fsusequery, str):
-           # pylint: disable=no-member
-           q.set_fs(args.fsusequery)
+           q.fs = args.fsusequery
        components.append(("Fs_use", q, lambda x: x.statement()))
    if args.genfsconquery or args.all:
        q = setools.GenfsconQuery(p)
        if isinstance(args.genfsconquery, str):
-           # pylint: disable=no-member
-           q.set_fs(args.genfsconquery)
+           q.fs = args.genfsconquery
        components.append(("Genfscon", q, lambda x: x.statement()))
    if args.initialsidquery or args.all:
        q = setools.InitialSIDQuery(p)
        if isinstance(args.initialsidquery, str):
-           q.set_name(args.initialsidquery)
+           q.name = args.initialsidquery
        components.append(("Initial SIDs", q, lambda x: x.statement()))
    if args.netifconquery or args.all:
        q = setools.NetifconQuery(p)
        if isinstance(args.netifconquery, str):
-           q.set_name(args.netifconquery)
+           q.name = args.netifconquery
        components.append(("Netifcon", q, lambda x: x.statement()))
    if args.nodeconquery or args.all:
        q = setools.NodeconQuery(p)
        if isinstance(args.nodeconquery, str):
-           # pylint: disable=no-member
-           q.set_network(args.nodeconquery)
+           q.network = args.nodeconquery
        components.append(("Nodecon", q, lambda x: x.statement()))
    if args.permissivequery or args.all:
        q = setools.TypeQuery(p, permissive=True, match_permissive=True)
        if isinstance(args.permissivequery, str):
-           q.set_name(args.permissivequery)
+           q.name = args.permissivequery
        components.append(("Permissive Types", q, lambda x: x.statement()))
    if args.polcapquery or args.all:
        q = setools.PolCapQuery(p)
        if isinstance(args.polcapquery, str):
-           q.set_name(args.polcapquery)
+           q.name = args.polcapquery
        components.append(("Polcap", q, lambda x: x.statement()))

@@ -195,11 +191,9 @@ try:
            parser.error("Enter a port number or range, e.g. 22 or 6000-6020")
        if len(ports) == 2:
-           # pylint: disable=no-member
-           q.set_ports((ports[0], ports[1]))
+           q.ports = ports
        elif len(ports) == 1:
-           # pylint: disable=no-member
-           q.set_ports((ports[0], ports[0]))
+           q.ports = (ports[0], ports[0])
        else:
            parser.error("Enter a port number or range, e.g. 22 or 6000-6020")

@@ -208,43 +202,42 @@ try:
    if args.rolequery or args.all:
        q = setools.RoleQuery(p)
        if isinstance(args.rolequery, str):
-           q.set_name(args.rolequery)
+           q.name = args.rolequery
        components.append(("Roles", q, lambda x: x.statement()))
    if args.mlssensquery or args.all:
        q = setools.SensitivityQuery(p)
        if isinstance(args.mlssensquery, str):
-           q.set_name(args.mlssensquery)
+           q.name = args.mlssensquery
        components.append(("Sensitivities", q, lambda x: x.statement()))
    if args.typequery or args.all:
        q = setools.TypeQuery(p)
        if isinstance(args.typequery, str):
-           q.set_name(args.typequery)
+           q.name = args.typequery
        components.append(("Types", q, lambda x: x.statement()))
    if args.typeattrquery or args.all:
        q = setools.TypeAttributeQuery(p)
        if isinstance(args.typeattrquery, str):
-           q.set_name(args.typeattrquery)
+           q.name = args.typeattrquery
        components.append(("Type Attributes", q, expand_attr))
    if args.userquery or args.all:
        q = setools.UserQuery(p)
        if isinstance(args.userquery, str):
-           q.set_name(args.userquery)
+           q.name = args.userquery
        components.append(("Users", q, lambda x: x.statement()))
    if args.validatetransquery or args.all:
        q = setools.ConstraintQuery(p, ruletype=["validatetrans", "mlsvalidatetrans"])
        if isinstance(args.validatetransquery, str):
-           # pylint: disable=no-member
-           q.set_tclass(args.validatetransquery)
+           q.tclass = [args.validatetransquery]
        components.append(("Validatetrans", q, lambda x: x.statement()))


@@ -79,7 +79,7 @@ else:
try:
    p = setools.SELinuxPolicy(args.policy)
    m = setools.PermissionMap(args.map)
-   g = setools.InfoFlowAnalysis(p, m, minweight=args.min_weight, exclude=args.exclude)
+   g = setools.InfoFlowAnalysis(p, m, min_weight=args.min_weight, exclude=args.exclude)

    if args.shortest_path or args.all_paths:
        if args.shortest_path:


@@ -135,18 +135,18 @@ try:
    # with an empty string in it (split on empty string)
    if args.tclass:
        if args.tclass_regex:
-           q.set_tclass(args.tclass)
+           q.tclass = args.tclass
        else:
-           q.set_tclass(args.tclass.split(","))
+           q.tclass = args.tclass.split(",")
    if args.perms:
-       q.set_perms(args.perms.split(","))
+       q.perms = args.perms.split(",")
    if args.boolean:
        if args.boolean_regex:
-           q.set_boolean(args.boolean)
+           q.boolean = args.boolean
        else:
-           q.set_boolean(args.boolean.split(","))
+           q.boolean = args.boolean.split(",")

    for r in sorted(q.results()):
        print(r)

@@ -168,9 +168,9 @@ try:
    # with an empty string in it (split on empty string)
    if args.tclass:
        if args.tclass_regex:
-           q.set_tclass(args.tclass)
+           q.tclass = args.tclass
        else:
-           q.set_tclass(args.tclass.split(","))
+           q.tclass = args.tclass.split(",")

    for r in sorted(q.results()):
        print(r)

@@ -189,9 +189,9 @@ try:
    # with an empty string in it (split on empty string)
    if args.tclass:
        if args.tclass_regex:
-           q.set_tclass(args.tclass)
+           q.tclass = args.tclass
        else:
-           q.set_tclass(args.tclass.split(","))
+           q.tclass = args.tclass.split(",")

    for r in sorted(q.results()):
        print(r)


@@ -19,61 +19,48 @@
import logging

from . import compquery
+from .descriptors import CriteriaDescriptor


class BoolQuery(compquery.ComponentQuery):
-   """Query SELinux policy Booleans."""
-
-   def __init__(self, policy,
-                name=None, name_regex=False,
-                default=False, match_default=False):
-       """
-       Parameter:
-       policy          The policy to query.
-       name            The Boolean name to match.
-       name_regex      If true, regular expression matching
-                       will be used on the Boolean name.
-       default         The default state to match.
-       match_default   If true, the default state will be matched.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-       self.set_name(name, regex=name_regex)
-       self.set_default(match_default, default=default)
+   """Query SELinux policy Booleans.
+
+   Parameter:
+   policy          The policy to query.
+
+   Keyword Parameters/Class attributes:
+   name            The Boolean name to match.
+   name_regex      If true, regular expression matching
+                   will be used on the Boolean name.
+   default         The default state to match.  If this
+                   is None, the default state not be matched.
+   """
+
+   _default = None
+
+   @property
+   def default(self):
+       return self._default
+
+   @default.setter
+   def default(self, value):
+       if value is None:
+           self._default = None
+       else:
+           self._default = bool(value)

    def results(self):
        """Generator which yields all Booleans matching the criteria."""
        self.log.info("Generating results from {0.policy}".format(self))
-       self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self))
-       self.log.debug("Default: {0.match_default}, state: {0.default}".format(self))
+       self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
+       self.log.debug("Default: {0.default}".format(self))

        for boolean in self.policy.bools():
-           if self.name and not self._match_name(boolean):
+           if not self._match_name(boolean):
                continue
-           if self.match_default and boolean.state != self.default:
+           if self.default is not None and boolean.state != self.default:
                continue

            yield boolean
-
-   def set_default(self, match, **opts):
-       """
-       Set if the default Boolean state should be matched.
-
-       Parameter:
-       match           If true, the default state will be matched.
-       default         The default state to match.
-
-       Exceptions:
-       NameError       Invalid keyword option.
-       """
-
-       self.match_default = bool(match)
-
-       for k in list(opts.keys()):
-           if k == "default":
-               self.default = bool(opts[k])
-           else:
-               raise NameError("Invalid default option: {0}".format(k))
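A standalone illustration of the default-state property introduced above; MockBoolQuery is a stand-in, not the real class. None disables the criterion, anything else is coerced to bool, which is what lets results() test self.default is not None.

class MockBoolQuery(object):
    _default = None

    @property
    def default(self):
        return self._default

    @default.setter
    def default(self, value):
        # None disables matching on the default state; other values are coerced to bool
        self._default = None if value is None else bool(value)


q = MockBoolQuery()
q.default = 1
print(q.default)   # True
q.default = None
print(q.default)   # None: the default-state criterion is disabled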


@@ -24,37 +24,32 @@ from . import mixins
class CategoryQuery(mixins.MatchAlias, compquery.ComponentQuery):
-   """Query MLS Categories"""
-
-   def __init__(self, policy,
-                name=None, name_regex=False,
-                alias=None, alias_regex=False):
-       """
-       Parameters:
-       name         The name of the category to match.
-       name_regex   If true, regular expression matching will
-                    be used for matching the name.
-       alias        The alias name to match.
-       alias_regex  If true, regular expression matching
-                    will be used on the alias names.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-       self.set_name(name, regex=name_regex)
-       self.set_alias(alias, regex=alias_regex)
+   """
+   Query MLS Categories
+
+   Parameter:
+   policy       The policy to query.
+
+   Keyword Parameters/Class attributes:
+   name         The name of the category to match.
+   name_regex   If true, regular expression matching will
+                be used for matching the name.
+   alias        The alias name to match.
+   alias_regex  If true, regular expression matching
+                will be used on the alias names.
+   """

    def results(self):
        """Generator which yields all matching categories."""
        self.log.info("Generating results from {0.policy}".format(self))
-       self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self))
-       self.log.debug("Alias: {0.alias_cmp}, regex: {0.alias_regex}".format(self))
+       self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
+       self.log.debug("Alias: {0.alias}, regex: {0.alias_regex}".format(self))

        for cat in self.policy.categories():
-           if self.name and not self._match_name(cat):
+           if not self._match_name(cat):
                continue
-           if self.alias and not self._match_alias(cat.aliases()):
+           if not self._match_alias(cat):
                continue

            yield cat


@@ -19,84 +19,42 @@
import logging
import re

-from . import compquery
+from . import compquery, mixins


-class CommonQuery(compquery.ComponentQuery):
-
-   """Query common permission sets."""
-
-   def __init__(self, policy,
-                name=None, name_regex=False,
-                perms=None, perms_equal=False, perms_regex=False):
-       """
-       Parameters:
-       name         The name of the common to match.
-       name_regex   If true, regular expression matching will
-                    be used for matching the name.
-       perms        The permissions to match.
-       perms_equal  If true, only commons with permission sets
-                    that are equal to the criteria will
-                    match.  Otherwise, any intersection
-                    will match.
-       perms_regex  If true, regular expression matching will be used
-                    on the permission names instead of set logic.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-       self.set_name(name, regex=name_regex)
-       self.set_perms(perms, regex=perms_regex, equal=perms_equal)
+class CommonQuery(mixins.MatchPermission, compquery.ComponentQuery):
+
+   """
+   Query common permission sets.
+
+   Parameter:
+   policy       The policy to query.
+
+   Keyword Parameters/Class attributes:
+   name         The name of the common to match.
+   name_regex   If true, regular expression matching will
+                be used for matching the name.
+   perms        The permissions to match.
+   perms_equal  If true, only commons with permission sets
+                that are equal to the criteria will
+                match.  Otherwise, any intersection
+                will match.
+   perms_regex  If true, regular expression matching will be used
+                on the permission names instead of set logic.
+   """

    def results(self):
        """Generator which yields all matching commons."""
        self.log.info("Generating results from {0.policy}".format(self))
-       self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self))
-       self.log.debug("Perms: {0.perms_cmp!r}, regex: {0.perms_regex}, eq: {0.perms_equal}".
+       self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
+       self.log.debug("Perms: {0.perms!r}, regex: {0.perms_regex}, eq: {0.perms_equal}".
                       format(self))

        for com in self.policy.commons():
-           if self.name and not self._match_name(com):
+           if not self._match_name(com):
                continue
-           if self.perms and not self._match_regex_or_set(
-                   com.perms,
-                   self.perms_cmp,
-                   self.perms_equal,
-                   self.perms_regex):
+           if not self._match_perms(com):
                continue

            yield com
-
-   def set_perms(self, perms, **opts):
-       """
-       Set the criteria for the common's permissions.
-
-       Parameter:
-       perms        Name to match the common's permissions.
-
-       Keyword Options:
-       regex        If true, regular expression matching will be used.
-       equal        If true, the permisison set of the common
-                    must equal the permissions criteria to
-                    match.  If false, any intersection in the
-                    critera will cause a match.
-
-       Exceptions:
-       NameError    Invalid keyword option.
-       """
-
-       self.perms = perms
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.perms_regex = opts[k]
-           elif k == "equal":
-               self.perms_equal = opts[k]
-           else:
-               raise NameError("Invalid permissions option: {0}".format(k))
-
-       if self.perms_regex:
-           self.perms_cmp = re.compile(self.perms)
-       else:
-           self.perms_cmp = self.perms


@@ -20,37 +20,20 @@
import re

from . import query
+from .descriptors import CriteriaDescriptor


class ComponentQuery(query.PolicyQuery):
-   """Abstract base class for SETools component queries."""
+   """Base class for SETools component queries."""
+
+   name = CriteriaDescriptor("name_regex")
+   name_regex = False

    def _match_name(self, obj):
        """Match the object to the name criteria."""
-       return self._match_regex(obj, self.name_cmp, self.name_regex)
-
-   def set_name(self, name, **opts):
-       """
-       Set the criteria for matching the component's name.
-
-       Parameter:
-       name        Name to match the component's name.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.name = name
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.name_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if self.name_regex:
-           self.name_cmp = re.compile(self.name)
-       else:
-           self.name_cmp = self.name
+       if not self.name:
+           # if there is no criteria, everything matches.
+           return True
+
+       return self._match_regex(obj, self.name, self.name_regex)


@@ -19,62 +19,58 @@
import logging
import re

-from . import mixins
-from .query import PolicyQuery
+from . import mixins, query
+from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor, RuletypeDescriptor
from .policyrep.exception import ConstraintUseError


-class ConstraintQuery(mixins.MatchObjClass, mixins.MatchPermission, PolicyQuery):
-
-   """Query constraint rules, (mls)constrain/(mls)validatetrans."""
-
-   def __init__(self, policy,
-                ruletype=None,
-                tclass=None, tclass_regex=False,
-                perms=None, perms_equal=False,
-                role=None, role_regex=False, role_indirect=True,
-                type_=None, type_regex=False, type_indirect=True,
-                user=None, user_regex=False):
-       """
-       Parameter:
-       policy          The policy to query.
-       ruletype        The rule type(s) to match.
-       tclass          The object class(es) to match.
-       tclass_regex    If true, use a regular expression for
-                       matching the rule's object class.
-       perms           The permission(s) to match.
-       perms_equal     If true, the permission set of the rule
-                       must exactly match the permissions
-                       criteria.  If false, any set intersection
-                       will match.
-       role            The name of the role to match in the
-                       constraint expression.
-       role_indirect   If true, members of an attribute will be
-                       matched rather than the attribute itself.
-       role_regex      If true, regular expression matching will
-                       be used on the role.
-       type_           The name of the type/attribute to match in the
-                       constraint expression.
-       type_indirect   If true, members of an attribute will be
-                       matched rather than the attribute itself.
-       type_regex      If true, regular expression matching will
-                       be used on the type/attribute.
-       user            The name of the user to match in the
-                       constraint expression.
-       user_regex      If true, regular expression matching will
-                       be used on the user.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-
-       self.set_ruletype(ruletype)
-       self.set_tclass(tclass, regex=tclass_regex)
-       self.set_perms(perms, equal=perms_equal)
-       self.set_role(role, regex=role_regex, indirect=role_indirect)
-       self.set_type(type_, regex=type_regex, indirect=type_indirect)
-       self.set_user(user, regex=user_regex)
+class ConstraintQuery(mixins.MatchObjClass, mixins.MatchPermission, query.PolicyQuery):
+
+   """
+   Query constraint rules, (mls)constrain/(mls)validatetrans.
+
+   Parameter:
+   policy          The policy to query.
+
+   Keyword Parameters/Class attributes:
+   ruletype        The list of rule type(s) to match.
+   tclass          The object class(es) to match.
+   tclass_regex    If true, use a regular expression for
+                   matching the rule's object class.
+   perms           The permission(s) to match.
+   perms_equal     If true, the permission set of the rule
+                   must exactly match the permissions
+                   criteria.  If false, any set intersection
+                   will match.
+   perms_regex     If true, regular expression matching will be used
+                   on the permission names instead of set logic.
+   role            The name of the role to match in the
+                   constraint expression.
+   role_indirect   If true, members of an attribute will be
+                   matched rather than the attribute itself.
+   role_regex      If true, regular expression matching will
+                   be used on the role.
+   type_           The name of the type/attribute to match in the
+                   constraint expression.
+   type_indirect   If true, members of an attribute will be
+                   matched rather than the attribute itself.
+   type_regex      If true, regular expression matching will
+                   be used on the type/attribute.
+   user            The name of the user to match in the
+                   constraint expression.
+   user_regex      If true, regular expression matching will
+                   be used on the user.
+   """
+
+   ruletype = RuletypeDescriptor("validate_constraint_ruletype")
+   user = CriteriaDescriptor("user_regex", "lookup_user")
+   user_regex = False
+   role = CriteriaDescriptor("role_regex", "lookup_role")
+   role_regex = False
+   role_indirect = True
+   type_ = CriteriaDescriptor("type_regex", "lookup_type_or_attr")
+   type_regex = False
+   type_indirect = True

    def _match_expr(self, expr, criteria, indirect, regex):
        """

@@ -101,143 +97,46 @@ class ConstraintQuery(mixins.MatchObjClass, mixins.MatchPermission, PolicyQuery)
        """Generator which yields all matching constraints rules."""
        self.log.info("Generating results from {0.policy}".format(self))
        self.log.debug("Ruletypes: {0.ruletype}".format(self))
-       self.log.debug("Class: {0.tclass_cmp!r}, regex: {0.tclass_regex}".format(self))
-       self.log.debug("Perms: {0.perms_cmp}, eq: {0.perms_equal}".format(self))
-       self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self))
-       self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self))
-       self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self))
+       self.log.debug("Class: {0.tclass!r}, regex: {0.tclass_regex}".format(self))
+       self.log.debug("Perms: {0.perms!r}, regex: {0.perms_regex}, eq: {0.perms_equal}".
+                      format(self))
+       self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
+       self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
+       self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))

        for c in self.policy.constraints():
            if self.ruletype:
                if c.ruletype not in self.ruletype:
                    continue

-           if self.tclass and not self._match_object_class(c.tclass):
+           if not self._match_object_class(c):
                continue

-           if self.perms:
-               try:
-                   if not self._match_perms(c.perms):
-                       continue
-               except ConstraintUseError:
-                   continue
+           try:
+               if not self._match_perms(c):
+                   continue
+           except ConstraintUseError:
+               continue

            if self.role and not self._match_expr(
                    c.roles,
-                   self.role_cmp,
+                   self.role,
                    self.role_indirect,
                    self.role_regex):
                continue

            if self.type_ and not self._match_expr(
                    c.types,
-                   self.type_cmp,
+                   self.type_,
                    self.type_indirect,
                    self.type_regex):
                continue

            if self.user and not self._match_expr(
                    c.users,
-                   self.user_cmp,
+                   self.user,
                    False,
                    self.user_regex):
                continue

            yield c
-
-   def set_ruletype(self, ruletype):
-       """
-       Set the rule types for the rule query.
-
-       Parameter:
-       ruletype    The rule types to match.
-       """
-       if ruletype:
-           self.policy.validate_constraint_ruletype(ruletype)
-
-       self.ruletype = ruletype
-
-   def set_role(self, role, **opts):
-       """
-       Set the criteria for matching the constraint's role.
-
-       Parameter:
-       role        Name to match the constraint's role.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.role = role
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.role_regex = opts[k]
-           elif k == "indirect":
-               self.role_indirect = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.role:
-           self.role_cmp = None
-       elif self.role_regex:
-           self.role_cmp = re.compile(self.role)
-       else:
-           self.role_cmp = self.policy.lookup_role(self.role)
-
-   def set_type(self, type_, **opts):
-       """
-       Set the criteria for matching the constraint's type.
-
-       Parameter:
-       type_       Name to match the constraint's type.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.type_ = type_
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.type_regex = opts[k]
-           elif k == "indirect":
-               self.type_indirect = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.type_:
-           self.type_cmp = None
-       elif self.type_regex:
-           self.type_cmp = re.compile(self.type_)
-       else:
-           self.type_cmp = self.policy.lookup_type(type_)
-
-   def set_user(self, user, **opts):
-       """
-       Set the criteria for matching the constraint's user.
-
-       Parameter:
-       user        Name to match the constraint's user.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.user = user
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.user_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.user:
-           self.user_cmp = None
-       elif self.user_regex:
-           self.user_cmp = re.compile(self.user)
-       else:
-           self.user_cmp = self.policy.lookup_user(self.user)


@@ -20,190 +20,79 @@
import re

from . import query
+from .descriptors import CriteriaDescriptor


class ContextQuery(query.PolicyQuery):
-   """Abstract base class for SETools in-policy labeling/context queries."""
-
-   @staticmethod
-   def _match_context(context,
-                      user, user_regex,
-                      role, role_regex,
-                      type_, type_regex,
-                      range_, range_subset, range_overlap, range_superset, range_proper):
-       """
-       Match the context with optional regular expression.
-
-       Parameters:
-       context         The object to match.
-       user            The user to match in the context.
-       user_regex      If true, regular expression matching
-                       will be used on the user.
-       role            The role to match in the context.
-       role_regex      If true, regular expression matching
-                       will be used on the role.
-       type_           The type to match in the context.
-       type_regex      If true, regular expression matching
-                       will be used on the type.
-       range_          The range to match in the context.
-       range_subset    If true, the criteria will match if it
-                       is a subset of the context's range.
-       range_overlap   If true, the criteria will match if it
-                       overlaps any of the context's range.
-       range_superset  If true, the criteria will match if it
-                       is a superset of the context's range.
-       range_proper    If true, use proper superset/subset
-                       on range matching operations.
-                       No effect if not using set operations.
-       """
+   """
+   Base class for SETools in-policy labeling/context queries.
+
+   Parameter:
+   policy          The policy to query.
+
+   Keyword Parameters/Class attributes:
+   context         The object to match.
+   user            The user to match in the context.
+   user_regex      If true, regular expression matching
+                   will be used on the user.
+   role            The role to match in the context.
+   role_regex      If true, regular expression matching
+                   will be used on the role.
+   type_           The type to match in the context.
+   type_regex      If true, regular expression matching
+                   will be used on the type.
+   range_          The range to match in the context.
+   range_subset    If true, the criteria will match if it
+                   is a subset of the context's range.
+   range_overlap   If true, the criteria will match if it
+                   overlaps any of the context's range.
+   range_superset  If true, the criteria will match if it
+                   is a superset of the context's range.
+   range_proper    If true, use proper superset/subset
+                   on range matching operations.
+                   No effect if not using set operations.
+   """
+
+   user = CriteriaDescriptor("user_regex", "lookup_user")
+   user_regex = False
+   role = CriteriaDescriptor("role_regex", "lookup_role")
+   role_regex = False
+   type_ = CriteriaDescriptor("type_regex", "lookup_type")
+   type_regex = False
+   range_ = CriteriaDescriptor(lookup_function="lookup_range")
+   range_overlap = False
+   range_subset = False
+   range_superset = False
+   range_proper = False
+
+   def _match_context(self, context):

-       if user and not query.PolicyQuery._match_regex(
+       if self.user and not query.PolicyQuery._match_regex(
                context.user,
-               user,
-               user_regex):
+               self.user,
+               self.user_regex):
            return False

-       if role and not query.PolicyQuery._match_regex(
+       if self.role and not query.PolicyQuery._match_regex(
                context.role,
-               role,
-               role_regex):
+               self.role,
+               self.role_regex):
            return False

-       if type_ and not query.PolicyQuery._match_regex(
+       if self.type_ and not query.PolicyQuery._match_regex(
                context.type_,
-               type_,
-               type_regex):
+               self.type_,
+               self.type_regex):
            return False

-       if range_ and not query.PolicyQuery._match_range(
+       if self.range_ and not query.PolicyQuery._match_range(
                context.range_,
-               range_,
-               range_subset,
-               range_overlap,
-               range_superset,
-               range_proper):
+               self.range_,
+               self.range_subset,
+               self.range_overlap,
+               self.range_superset,
+               self.range_proper):
            return False

        return True
-
-   def set_user(self, user, **opts):
-       """
-       Set the criteria for matching the context's user.
-
-       Parameter:
-       user        Name to match the context's user.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.user = user
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.user_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.user:
-           self.user_cmp = None
-       elif self.user_regex:
-           self.user_cmp = re.compile(self.user)
-       else:
-           self.user_cmp = self.policy.lookup_user(self.user)
-
-   def set_role(self, role, **opts):
-       """
-       Set the criteria for matching the context's role.
-
-       Parameter:
-       role        Name to match the context's role.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.role = role
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.role_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.role:
-           self.role_cmp = None
-       elif self.role_regex:
-           self.role_cmp = re.compile(self.role)
-       else:
-           self.role_cmp = self.policy.lookup_role(self.role)
-
-   def set_type(self, type_, **opts):
-       """
-       Set the criteria for matching the context's type.
-
-       Parameter:
-       type_       Name to match the context's type.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.type_ = type_
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.type_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.type_:
-           self.type_cmp = None
-       elif self.type_regex:
-           self.type_cmp = re.compile(self.type_)
-       else:
-           self.type_cmp = self.policy.lookup_type(type_)
-
-   def set_range(self, range_, **opts):
-       """
-       Set the criteria for matching the context's range.
-
-       Parameter:
-       range_      Criteria to match the context's range.
-
-       Keyword Parameters:
-       subset      If true, the criteria will match if it is a subset
-                   of the context's range.
-       overlap     If true, the criteria will match if it overlaps
-                   any of the context's range.
-       superset    If true, the criteria will match if it is a superset
-                   of the context's range.
-       proper      If true, use proper superset/subset operations.
-                   No effect if not using set operations.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.range_ = range_
-
-       for k in list(opts.keys()):
-           if k == "subset":
-               self.range_subset = opts[k]
-           elif k == "overlap":
-               self.range_overlap = opts[k]
-           elif k == "superset":
-               self.range_superset = opts[k]
-           elif k == "proper":
-               self.range_proper = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if self.range_:
-           self.range_cmp = self.policy.lookup_range(self.range_)
-       else:
-           self.range_cmp = None

setools/descriptors.py (new file)

@@ -0,0 +1,230 @@
# Copyright 2015, Tresys Technology, LLC
#
# This file is part of SETools.
#
# SETools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1 of
# the License, or (at your option) any later version.
#
# SETools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SETools. If not, see
# <http://www.gnu.org/licenses/>.
#
"""
SETools descriptors.
These classes override how a class's attributes are get/set/deleted.
This is how the @property decorator works.
See https://docs.python.org/3/howto/descriptor.html
for more details.
"""
import re
from collections import defaultdict
from weakref import WeakKeyDictionary
#
# Query criteria descriptors
#
# Implementation note: if the name_regex attribute value
# is changed the criteria must be reset.
#
class CriteriaDescriptor(object):
"""
Single item criteria descriptor.
Parameters:
name_regex The name of instance's regex setting attribute;
used as name_regex below. If unset,
regular expressions will never be used.
lookup_function The name of the SELinuxPolicy lookup function,
e.g. lookup_type or lookup_boolean.
default_value The default value of the criteria. The default
is None.
Read-only instance attribute use (obj parameter):
policy The instance of SELinuxPolicy
name_regex This attribute is read to determine if
the criteria should be looked up or
compiled into a regex. If the attribute
does not exist, False is assumed.
"""
def __init__(self, name_regex=None, lookup_function=None, default_value=None):
assert name_regex or lookup_function, "A simple attribute should be used if there is " \
"no regex nor lookup function."
self.regex = name_regex
self.default_value = default_value
self.lookup_function = lookup_function
# use weak references so instances can be
# garbage collected, rather than unnecessarily
# kept around due to this descriptor.
self.instances = WeakKeyDictionary()
def __get__(self, obj, objtype=None):
if obj is None:
return self
return self.instances.setdefault(obj, self.default_value)
def __set__(self, obj, value):
if not value:
self.instances[obj] = None
elif self.regex and getattr(obj, self.regex, False):
self.instances[obj] = re.compile(value)
elif self.lookup_function:
lookup = getattr(obj.policy, self.lookup_function)
self.instances[obj] = lookup(value)
else:
self.instances[obj] = value
class CriteriaSetDescriptor(CriteriaDescriptor):
"""Descriptor for a set of criteria."""
def __set__(self, obj, value):
if not value:
self.instances[obj] = None
elif self.regex and getattr(obj, self.regex, False):
self.instances[obj] = re.compile(value)
elif self.lookup_function:
lookup = getattr(obj.policy, self.lookup_function)
self.instances[obj] = set(lookup(v) for v in value)
else:
self.instances[obj] = set(value)
class RuletypeDescriptor(object):
"""
Descriptor for a list of rule types.
Parameters:
validator The name of the SELinuxPolicy ruletype
validator function, e.g. validate_te_ruletype
default_value The default value of the criteria. The default
is None.
Read-only instance attribute use (obj parameter):
policy The instance of SELinuxPolicy
"""
def __init__(self, validator):
self.validator = validator
# use weak references so instances can be
# garbage collected, rather than unnecessarily
# kept around due to this descriptor.
self.instances = WeakKeyDictionary()
def __get__(self, obj, objtype=None):
if obj is None:
return self
return self.instances.setdefault(obj, None)
def __set__(self, obj, value):
if value:
validate = getattr(obj.policy, self.validator)
validate(value)
self.instances[obj] = value
else:
self.instances[obj] = None
#
# NetworkX Graph Descriptors
#
# These descriptors are used to simplify all
# of the dictionary use in the NetworkX graph.
#
class NetworkXGraphEdgeDescriptor(object):
"""
Descriptor base class for NetworkX graph edge attributes.
Parameter:
name The edge property name
Instance class attribute use (obj parameter):
G The NetworkX graph
source The edge's source node
target The edge's target node
"""
def __init__(self, propname):
self.name = propname
def __get__(self, obj, objtype=None):
if obj is None:
return self
return obj.G[obj.source][obj.target][self.name]
def __set__(self, obj, value):
raise NotImplementedError
def __delete__(self, obj):
raise NotImplementedError
class EdgeAttrDict(NetworkXGraphEdgeDescriptor):
"""A descriptor for edge attributes that are dictionaries."""
def __set__(self, obj, value):
# None is a special value to initialize the attribute
if value is None:
obj.G[obj.source][obj.target][self.name] = defaultdict(list)
else:
raise ValueError("{0} dictionaries should not be assigned directly".format(self.name))
def __delete__(self, obj):
obj.G[obj.source][obj.target][self.name].clear()
class EdgeAttrIntMax(NetworkXGraphEdgeDescriptor):
"""
A descriptor for edge attributes that are non-negative integers that always
keep the max assigned value until re-initialized.
"""
def __set__(self, obj, value):
# None is a special value to initialize
if value is None:
obj.G[obj.source][obj.target][self.name] = 0
else:
current_value = obj.G[obj.source][obj.target][self.name]
obj.G[obj.source][obj.target][self.name] = max(current_value, value)
class EdgeAttrList(NetworkXGraphEdgeDescriptor):
"""A descriptor for edge attributes that are lists."""
def __set__(self, obj, value):
# None is a special value to initialize
if value is None:
obj.G[obj.source][obj.target][self.name] = []
else:
raise ValueError("{0} lists should not be assigned directly".format(self.name))
def __delete__(self, obj):
# in Python3 a .clear() function was added for lists
# keep this implementation for Python 2 compat
del obj.G[obj.source][obj.target][self.name][:]
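A short usage sketch of the criteria descriptors defined above. FakePolicy and FakeQuery are illustrative stand-ins (a real query holds a SELinuxPolicy as self.policy), and the import assumes this new module is available as setools.descriptors.

from setools.descriptors import CriteriaSetDescriptor, RuletypeDescriptor


class FakePolicy(object):
    # Stand-in lookup/validator methods, resolved by name through obj.policy.
    def lookup_type(self, name):
        return "type:" + name

    def validate_te_ruletype(self, types):
        if not set(types) <= {"allow", "dontaudit"}:
            raise ValueError("Invalid rule type(s): {0}".format(types))


class FakeQuery(object):
    type_ = CriteriaSetDescriptor("type_regex", "lookup_type")
    type_regex = False
    ruletype = RuletypeDescriptor("validate_te_ruletype")

    def __init__(self, policy):
        self.policy = policy


q = FakeQuery(FakePolicy())
q.ruletype = ["allow"]             # validated via FakePolicy.validate_te_ruletype
q.type_ = ["httpd_t", "sshd_t"]    # each name runs through lookup_type into a set
print(sorted(q.type_))             # -> ['type:httpd_t', 'type:sshd_t']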


@@ -23,7 +23,7 @@ from collections import defaultdict, namedtuple
import networkx as nx
from networkx.exception import NetworkXError, NetworkXNoPath

-from .infoflow import EdgeAttrList
+from .descriptors import EdgeAttrDict, EdgeAttrList

__all__ = ['DomainTransitionAnalysis']

@@ -55,37 +55,32 @@ class DomainTransitionAnalysis(object):
        self.log = logging.getLogger(self.__class__.__name__)
        self.policy = policy
-       self.set_exclude(exclude)
-       self.set_reverse(reverse)
+       self.exclude = exclude
+       self.reverse = reverse
        self.rebuildgraph = True
        self.rebuildsubgraph = True
        self.G = nx.DiGraph()
        self.subG = None

-   def set_reverse(self, reverse):
-       """
-       Set forward/reverse DTA direction.
-
-       Parameter:
-       reverse     If true, a reverse DTA is performed, otherwise a
-                   forward DTA is performed.
-       """
-       self.reverse = bool(reverse)
+   @property
+   def reverse(self):
+       return self._reverse
+
+   @reverse.setter
+   def reverse(self, direction):
+       self._reverse = bool(direction)
        self.rebuildsubgraph = True

-   def set_exclude(self, exclude):
-       """
-       Set the domains to exclude from the domain transition analysis.
-
-       Parameter:
-       exclude     A list of types.
-       """
-
-       if exclude:
-           self.exclude = [self.policy.lookup_type(t) for t in exclude]
+   @property
+   def exclude(self):
+       return self._exclude
+
+   @exclude.setter
+   def exclude(self, types):
+       if types:
+           self._exclude = [self.policy.lookup_type(t) for t in types]
        else:
-           self.exclude = []
+           self._exclude = None

        self.rebuildsubgraph = True

@@ -558,32 +553,6 @@ class DomainTransitionAnalysis(object):
        self.log.info("Completed building subgraph.")

-
-class EdgeAttrDict(object):
-
-   """
-   A descriptor for edge attributes that are dictionaries.
-
-   Parameter:
-   name    The edge property name
-   """
-
-   def __init__(self, propname):
-       self.name = propname
-
-   def __get__(self, obj, type=None):
-       return obj.G[obj.source][obj.target][self.name]
-
-   def __set__(self, obj, value):
-       # None is a special value to initialize the attribute
-       if value is None:
-           obj.G[obj.source][obj.target][self.name] = defaultdict(list)
-       else:
-           raise ValueError("{0} dictionaries should not be assigned directly".format(self.name))
-
-   def __delete__(self, obj):
-       obj.G[obj.source][obj.target][self.name].clear()


class Edge(object):

    """

@@ -20,67 +20,54 @@ import logging
import re

from . import contextquery
+from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor


class FSUseQuery(contextquery.ContextQuery):
-   """Query fs_use_* statements."""
-
-   def __init__(self, policy,
-                ruletype=None,
-                fs=None, fs_regex=False,
-                user=None, user_regex=False,
-                role=None, role_regex=False,
-                type_=None, type_regex=False,
-                range_=None, range_overlap=False, range_subset=False,
-                range_superset=False, range_proper=False):
-       """
-       Parameters:
-       policy          The policy to query.
-       ruletype        The rule type(s) to match.
-       fs              The criteria to match the file system type.
-       fs_regex        If true, regular expression matching
-                       will be used on the file system type.
-       user            The criteria to match the context's user.
-       user_regex      If true, regular expression matching
-                       will be used on the user.
-       role            The criteria to match the context's role.
-       role_regex      If true, regular expression matching
-                       will be used on the role.
-       type_           The criteria to match the context's type.
-       type_regex      If true, regular expression matching
-                       will be used on the type.
-       range_          The criteria to match the context's range.
-       range_subset    If true, the criteria will match if it is a subset
-                       of the context's range.
-       range_overlap   If true, the criteria will match if it overlaps
-                       any of the context's range.
-       range_superset  If true, the criteria will match if it is a superset
-                       of the context's range.
-       range_proper    If true, use proper superset/subset operations.
-                       No effect if not using set operations.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-
-       self.set_ruletype(ruletype)
-       self.set_fs(fs, regex=fs_regex)
-       self.set_user(user, regex=user_regex)
-       self.set_role(role, regex=role_regex)
-       self.set_type(type_, regex=type_regex)
-       self.set_range(range_, overlap=range_overlap, subset=range_subset,
-                      superset=range_superset, proper=range_proper)
+   """
+   Query fs_use_* statements.
+
+   Parameter:
+   policy          The policy to query.
+
+   Keyword Parameters/Class attributes:
+   ruletype        The rule type(s) to match.
+   fs              The criteria to match the file system type.
+   fs_regex        If true, regular expression matching
+                   will be used on the file system type.
+   user            The criteria to match the context's user.
+   user_regex      If true, regular expression matching
+                   will be used on the user.
+   role            The criteria to match the context's role.
+   role_regex      If true, regular expression matching
+                   will be used on the role.
+   type_           The criteria to match the context's type.
+   type_regex      If true, regular expression matching
+                   will be used on the type.
+   range_          The criteria to match the context's range.
+   range_subset    If true, the criteria will match if it is a subset
+                   of the context's range.
+   range_overlap   If true, the criteria will match if it overlaps
+                   any of the context's range.
+   range_superset  If true, the criteria will match if it is a superset
+                   of the context's range.
+   range_proper    If true, use proper superset/subset operations.
+                   No effect if not using set operations.
+   """
+
+   ruletype = None
+   fs = CriteriaDescriptor("fs_regex")
+   fs_regex = False

    def results(self):
        """Generator which yields all matching fs_use_* statements."""
        self.log.info("Generating results from {0.policy}".format(self))
        self.log.debug("Ruletypes: {0.ruletype}".format(self))
-       self.log.debug("FS: {0.fs_cmp!r}, regex: {0.fs_regex}".format(self))
-       self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self))
-       self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self))
-       self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self))
+       self.log.debug("FS: {0.fs!r}, regex: {0.fs_regex}".format(self))
+       self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
+       self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
+       self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
        self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
                       "superset: {0.range_superset}, proper: {0.range_proper}".format(self))

@@ -90,60 +77,11 @@ class FSUseQuery(contextquery.ContextQuery):
            if self.fs and not self._match_regex(
                    fsu.fs,
-                   self.fs_cmp,
+                   self.fs,
                    self.fs_regex):
                continue

-           if not self._match_context(
-                   fsu.context,
-                   self.user_cmp,
-                   self.user_regex,
-                   self.role_cmp,
-                   self.role_regex,
-                   self.type_cmp,
-                   self.type_regex,
-                   self.range_cmp,
-                   self.range_subset,
-                   self.range_overlap,
-                   self.range_superset,
-                   self.range_proper):
+           if not self._match_context(fsu.context):
                continue

            yield fsu
-
-   def set_ruletype(self, ruletype):
-       """
-       Set the rule types for the rule query.
-
-       Parameter:
-       ruletype    The rule types to match.
-       """
-
-       self.ruletype = ruletype
-
-   def set_fs(self, fs, **opts):
-       """
-       Set the criteria for matching the file system type.
-
-       Parameter:
-       fs          Name to match the file system.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.fs = fs
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.fs_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.fs:
-           self.fs_cmp = None
-       elif self.fs_regex:
-           self.fs_cmp = re.compile(self.fs)
-       else:
-           self.fs_cmp = self.fs

@@ -20,168 +20,79 @@ import logging
import re

from . import contextquery
+from .descriptors import CriteriaDescriptor


class GenfsconQuery(contextquery.ContextQuery):
-   """Query genfscon statements."""
-
-   def __init__(self, policy,
-                fs=None, fs_regex=False,
-                path=None, path_regex=False,
-                filetype=None,
-                user=None, user_regex=False,
-                role=None, role_regex=False,
-                type_=None, type_regex=False,
-                range_=None, range_overlap=False, range_subset=False,
-                range_superset=False, range_proper=False):
-       """
-       Parameters:
-       policy          The policy to query.
-       fs              The criteria to match the file system type.
-       fs_regex        If true, regular expression matching
-                       will be used on the file system type.
-       path            The criteria to match the path.
-       path_regex      If true, regular expression matching
-                       will be used on the path.
-       user            The criteria to match the context's user.
-       user_regex      If true, regular expression matching
-                       will be used on the user.
-       role            The criteria to match the context's role.
-       role_regex      If true, regular expression matching
-                       will be used on the role.
-       type_           The criteria to match the context's type.
-       type_regex      If true, regular expression matching
-                       will be used on the type.
-       range_          The criteria to match the context's range.
-       range_subset    If true, the criteria will match if it is a subset
-                       of the context's range.
-       range_overlap   If true, the criteria will match if it overlaps
-                       any of the context's range.
-       range_superset  If true, the criteria will match if it is a superset
-                       of the context's range.
-       range_proper    If true, use proper superset/subset operations.
-                       No effect if not using set operations.
-       """
-
-       self.log = logging.getLogger(self.__class__.__name__)
-       self.policy = policy
-
-       self.set_fs(fs, regex=fs_regex)
-       self.set_path(path, regex=path_regex)
-       self.set_filetype(filetype)
-       self.set_user(user, regex=user_regex)
-       self.set_role(role, regex=role_regex)
-       self.set_type(type_, regex=type_regex)
-       self.set_range(range_, overlap=range_overlap, subset=range_subset,
-                      superset=range_superset, proper=range_proper)
+   """
+   Query genfscon statements.
+
+   Parameter:
+   policy          The policy to query.
+
+   Keyword Parameters/Class attributes:
+   fs              The criteria to match the file system type.
+   fs_regex        If true, regular expression matching
+                   will be used on the file system type.
+   path            The criteria to match the path.
+   path_regex      If true, regular expression matching
+                   will be used on the path.
+   user            The criteria to match the context's user.
+   user_regex      If true, regular expression matching
+                   will be used on the user.
+   role            The criteria to match the context's role.
+   role_regex      If true, regular expression matching
+                   will be used on the role.
+   type_           The criteria to match the context's type.
+   type_regex      If true, regular expression matching
+                   will be used on the type.
+   range_          The criteria to match the context's range.
+   range_subset    If true, the criteria will match if it is a subset
+                   of the context's range.
+   range_overlap   If true, the criteria will match if it overlaps
+                   any of the context's range.
+   range_superset  If true, the criteria will match if it is a superset
+                   of the context's range.
+   range_proper    If true, use proper superset/subset operations.
+                   No effect if not using set operations.
+   """
+
+   filetype = None
+   fs = CriteriaDescriptor("fs_regex")
+   fs_regex = False
+   path = CriteriaDescriptor("path_regex")
+   path_regex = False

    def results(self):
        """Generator which yields all matching genfscons."""
        self.log.info("Generating results from {0.policy}".format(self))
-       self.log.debug("FS: {0.fs_cmp!r}, regex: {0.fs_regex}".format(self))
-       self.log.debug("Path: {0.path_cmp!r}, regex: {0.path_regex}".format(self))
+       self.log.debug("FS: {0.fs!r}, regex: {0.fs_regex}".format(self))
+       self.log.debug("Path: {0.path!r}, regex: {0.path_regex}".format(self))
        self.log.debug("Filetype: {0.filetype!r}".format(self))
-       self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self))
-       self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self))
-       self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self))
+       self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
+       self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
+       self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
        self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
                       "superset: {0.range_superset}, proper: {0.range_proper}".format(self))

        for genfs in self.policy.genfscons():
            if self.fs and not self._match_regex(
                    genfs.fs,
-                   self.fs_cmp,
+                   self.fs,
                    self.fs_regex):
                continue

            if self.path and not self._match_regex(
                    genfs.path,
-                   self.path_cmp,
+                   self.path,
                    self.path_regex):
                continue

            if self.filetype and not self.filetype == genfs.filetype:
                continue

-           if not self._match_context(
-                   genfs.context,
-                   self.user_cmp,
-                   self.user_regex,
-                   self.role_cmp,
-                   self.role_regex,
-                   self.type_cmp,
-                   self.type_regex,
-                   self.range_cmp,
-                   self.range_subset,
-                   self.range_overlap,
-                   self.range_superset,
-                   self.range_proper):
+           if not self._match_context(genfs.context):
                continue

            yield genfs
-
-   def set_fs(self, fs, **opts):
-       """
-       Set the criteria for matching the file system type.
-
-       Parameter:
-       fs          Name to match the file system.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.fs = fs
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.fs_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.fs:
-           self.fs_cmp = None
-       elif self.fs_regex:
-           self.fs_cmp = re.compile(self.fs)
-       else:
-           self.fs_cmp = self.fs
-
-   def set_filetype(self, filetype):
-       """
-       Set the criteria for matching the file type.
-
-       Parameter:
-       filetype    File type to match (e.g. stat.S_IFBLK or stat.S_IFREG).
-       """
-
-       self.filetype = filetype
-
-   def set_path(self, path, **opts):
-       """
-       Set the criteria for matching the path.
-
-       Parameter:
-       path        Criteria to match the path.
-       regex       If true, regular expression matching will be used.
-
-       Exceptions:
-       NameError   Invalid keyword option.
-       """
-
-       self.path = path
-
-       for k in list(opts.keys()):
-           if k == "regex":
-               self.path_regex = opts[k]
-           else:
-               raise NameError("Invalid name option: {0}".format(k))
-
-       if not self.path:
-           self.path_cmp = None
-       elif self.path_regex:
-           self.path_cmp = re.compile(self.path)
-       else:
-           self.path_cmp = self.path


@ -23,6 +23,8 @@ from collections import namedtuple
import networkx as nx import networkx as nx
from networkx.exception import NetworkXError, NetworkXNoPath from networkx.exception import NetworkXError, NetworkXNoPath
from .descriptors import EdgeAttrIntMax, EdgeAttrList
__all__ = ['InfoFlowAnalysis'] __all__ = ['InfoFlowAnalysis']
# Return values for the analysis # Return values for the analysis
@ -36,7 +38,7 @@ class InfoFlowAnalysis(object):
"""Information flow analysis.""" """Information flow analysis."""
def __init__(self, policy, perm_map, minweight=1, exclude=None): def __init__(self, policy, perm_map, min_weight=1, exclude=None):
""" """
Parameters: Parameters:
policy The policy to analyze. policy The policy to analyze.
@ -50,59 +52,48 @@ class InfoFlowAnalysis(object):
self.policy = policy self.policy = policy
self.set_min_weight(minweight) self.min_weight = min_weight
self.set_perm_map(perm_map) self.perm_map = perm_map
self.set_exclude(exclude) self.exclude = exclude
self.rebuildgraph = True self.rebuildgraph = True
self.rebuildsubgraph = True self.rebuildsubgraph = True
self.G = nx.DiGraph() self.G = nx.DiGraph()
self.subG = None self.subG = None
def set_min_weight(self, weight): @property
""" def min_weight(self):
Set the minimum permission weight for the information flow analysis. return self._min_weight
Parameter: @min_weight.setter
weight Minimum permission weight (1-10) def min_weight(self, weight):
Exceptions:
ValueError The minimum weight is not 1-10.
"""
if not 1 <= weight <= 10: if not 1 <= weight <= 10:
raise ValueError( raise ValueError(
"Min information flow weight must be an integer 1-10.") "Min information flow weight must be an integer 1-10.")
self.minweight = weight self._min_weight = weight
self.rebuildsubgraph = True self.rebuildsubgraph = True
def set_perm_map(self, perm_map): @property
""" def perm_map(self):
Set the permission map used for the information flow analysis. return self._perm_map
Parameter:
perm_map The permission map.
Exceptions:
TypeError The map is not a file path or permission map object.
"""
self.perm_map = perm_map
@perm_map.setter
def perm_map(self, perm_map):
self._perm_map = perm_map
self.rebuildgraph = True self.rebuildgraph = True
self.rebuildsubgraph = True self.rebuildsubgraph = True
def set_exclude(self, exclude): @property
""" def exclude(self):
Set the types to exclude from the information flow analysis. return self._exclude
Parameter: @exclude.setter
exclude A list of types. def exclude(self, types):
""" if types:
self._exclude = [self.policy.lookup_type(t) for t in types]
if exclude:
self.exclude = [self.policy.lookup_type(t) for t in exclude]
else: else:
self.exclude = [] self._exclude = []
self.rebuildsubgraph = True self.rebuildsubgraph = True
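
The property keeps the old range check, so an invalid weight now fails at plain attribute assignment rather than through a setter call. A standalone sketch of the same pattern, mirroring the code above and runnable without a policy or permission map:

class Analysis(object):
    """Minimal sketch of the min_weight property used by InfoFlowAnalysis."""

    def __init__(self, min_weight=1):
        self.rebuildsubgraph = True
        self.min_weight = min_weight            # runs the property setter below

    @property
    def min_weight(self):
        return self._min_weight

    @min_weight.setter
    def min_weight(self, weight):
        if not 1 <= weight <= 10:
            raise ValueError("Min information flow weight must be an integer 1-10.")
        self._min_weight = weight
        self.rebuildsubgraph = True             # any change invalidates the cached subgraph


a = Analysis()
a.min_weight = 5
try:
    a.min_weight = 30
except ValueError as err:
    print(err)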
@ -344,7 +335,7 @@ class InfoFlowAnalysis(object):
self.log.info("Building subgraph...") self.log.info("Building subgraph...")
self.log.debug("Excluding {0!r}".format(self.exclude)) self.log.debug("Excluding {0!r}".format(self.exclude))
self.log.debug("Min weight {0}".format(self.minweight)) self.log.debug("Min weight {0}".format(self.min_weight))
# delete excluded types from subgraph # delete excluded types from subgraph
nodes = [n for n in self.G.nodes() if n not in self.exclude] nodes = [n for n in self.G.nodes() if n not in self.exclude]
@ -353,11 +344,11 @@ class InfoFlowAnalysis(object):
# delete edges below minimum weight. # delete edges below minimum weight.
# no need if weight is 1, since that # no need if weight is 1, since that
# does not exclude any edges. # does not exclude any edges.
if self.minweight > 1: if self.min_weight > 1:
delete_list = [] delete_list = []
for s, t in self.subG.edges_iter(): for s, t in self.subG.edges_iter():
edge = Edge(self.subG, s, t) edge = Edge(self.subG, s, t)
if edge.weight < self.minweight: if edge.weight < self.min_weight:
delete_list.append(edge) delete_list.append(edge)
self.subG.remove_edges_from(delete_list) self.subG.remove_edges_from(delete_list)
@ -366,59 +357,6 @@ class InfoFlowAnalysis(object):
self.log.info("Completed building subgraph.") self.log.info("Completed building subgraph.")
class EdgeAttrList(object):
"""
A descriptor for edge attributes that are lists.
Parameter:
name The edge property name
"""
def __init__(self, propname):
self.name = propname
def __get__(self, obj, type=None):
return obj.G[obj.source][obj.target][self.name]
def __set__(self, obj, value):
# None is a special value to initialize
if value is None:
obj.G[obj.source][obj.target][self.name] = []
else:
raise ValueError("{0} lists should not be assigned directly".format(self.name))
def __delete__(self, obj):
# in Python3 a .clear() function was added for lists
# keep this implementation for Python 2 compat
del obj.G[obj.source][obj.target][self.name][:]
class EdgeAttrIntMax(object):
"""
A descriptor for edge attributes that are non-negative integers that always
keep the max assigned value until re-initialized.
Parameter:
name The edge property name
"""
def __init__(self, propname):
self.name = propname
def __get__(self, obj, type=None):
return obj.G[obj.source][obj.target][self.name]
def __set__(self, obj, value):
# None is a special value to initialize
if value is None:
obj.G[obj.source][obj.target][self.name] = 0
else:
current_value = obj.G[obj.source][obj.target][self.name]
obj.G[obj.source][obj.target][self.name] = max(current_value, value)
class Edge(object): class Edge(object):
""" """

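The relocated descriptors assume the owning object exposes G, source and target. A self-contained toy, with the descriptor body copied from the removed code above, showing how EdgeAttrIntMax keeps the maximum value ever assigned to a networkx edge attribute:

import networkx as nx


class EdgeAttrIntMax(object):
    """Edge attribute descriptor that keeps the maximum assigned value."""

    def __init__(self, propname):
        self.name = propname

    def __get__(self, obj, objtype=None):
        return obj.G[obj.source][obj.target][self.name]

    def __set__(self, obj, value):
        if value is None:                                    # None (re)initializes
            obj.G[obj.source][obj.target][self.name] = 0
        else:
            current = obj.G[obj.source][obj.target][self.name]
            obj.G[obj.source][obj.target][self.name] = max(current, value)


class Edge(object):
    weight = EdgeAttrIntMax("weight")

    def __init__(self, graph, source, target):
        self.G = graph
        self.source = source
        self.target = target
        self.weight = None          # initialize the edge attribute to 0


G = nx.DiGraph()
G.add_edge("a", "b")
e = Edge(G, "a", "b")
e.weight = 3
e.weight = 7
e.weight = 5
print(e.weight)                     # 7: the maximum assigned weight is retained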
View File

@ -24,79 +24,51 @@ from . import contextquery
class InitialSIDQuery(compquery.ComponentQuery, contextquery.ContextQuery): class InitialSIDQuery(compquery.ComponentQuery, contextquery.ContextQuery):
"""Initial SID (context) query.""" """
Initial SID (Initial context) query.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
user=None, user_regex=False,
role=None, role_regex=False,
type_=None, type_regex=False,
range_=None, range_overlap=False, range_subset=False,
range_superset=False, range_proper=False):
"""
Parameters:
policy The policy to query.
user The criteria to match the context's user. Keyword Parameters/Class attributes:
user_regex If true, regular expression matching name The Initial SID name to match.
will be used on the user. name_regex If true, regular expression matching
role The criteria to match the context's role. will be used on the Initial SID name.
role_regex If true, regular expression matching user The criteria to match the context's user.
will be used on the role. user_regex If true, regular expression matching
type_ The criteria to match the context's type. will be used on the user.
type_regex If true, regular expression matching role The criteria to match the context's role.
will be used on the type. role_regex If true, regular expression matching
range_ The criteria to match the context's range. will be used on the role.
range_subset If true, the criteria will match if it is a subset type_ The criteria to match the context's type.
of the context's range. type_regex If true, regular expression matching
range_overlap If true, the criteria will match if it overlaps will be used on the type.
any of the context's range. range_ The criteria to match the context's range.
range_superset If true, the criteria will match if it is a superset range_subset If true, the criteria will match if it is a subset
of the context's range. of the context's range.
range_proper If true, use proper superset/subset operations. range_overlap If true, the criteria will match if it overlaps
No effect if not using set operations. any of the context's range.
""" range_superset If true, the criteria will match if it is a superset
self.log = logging.getLogger(self.__class__.__name__) of the context's range.
range_proper If true, use proper superset/subset operations.
self.policy = policy No effect if not using set operations.
"""
self.set_name(name, regex=name_regex)
self.set_user(user, regex=user_regex)
self.set_role(role, regex=role_regex)
self.set_type(type_, regex=type_regex)
self.set_range(range_, overlap=range_overlap, subset=range_subset,
superset=range_superset, proper=range_proper)
def results(self): def results(self):
"""Generator which yields all matching initial SIDs.""" """Generator which yields all matching initial SIDs."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self)) self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self)) self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self)) self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, " self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
"superset: {0.range_superset}, proper: {0.range_proper}".format(self)) "superset: {0.range_superset}, proper: {0.range_proper}".format(self))
for i in self.policy.initialsids(): for i in self.policy.initialsids():
if self.name and not self._match_regex( if not self._match_name(i):
i,
self.name_cmp,
self.name_regex):
continue continue
if not self._match_context( if not self._match_context(i.context):
i.context,
self.user_cmp,
self.user_regex,
self.role_cmp,
self.role_regex,
self.type_cmp,
self.type_regex,
self.range_cmp,
self.range_subset,
self.range_overlap,
self.range_superset,
self.range_proper):
continue continue
yield i yield i
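
Since the criteria are now descriptors, they can be given as constructor keywords or assigned afterwards. A hedged usage sketch with a hypothetical policy path (kernel and security are standard initial SID names, used here only as a regex example):

import setools

p = setools.SELinuxPolicy("policy.30")      # hypothetical path

# keyword form: name_regex is applied before name because keyword
# arguments are set in reverse-sorted order
q = setools.InitialSIDQuery(p, name="kernel|security", name_regex=True)

# attribute form: criteria can also be (re)assigned after construction
q.user = "system_u"

for sid in q.results():
    print(sid.statement())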

View File

@ -19,129 +19,73 @@
# pylint: disable=attribute-defined-outside-init,no-member # pylint: disable=attribute-defined-outside-init,no-member
import re import re
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor
class MatchAlias(object): class MatchAlias(object):
"""Mixin for matching an object's aliases.""" """Mixin for matching an object's aliases."""
def _match_alias(self, obj): alias = CriteriaDescriptor("alias_regex")
"""Match the object to the alias criteria.""" alias_regex = False
return self._match_in_set(obj, self.alias_cmp, self.alias_regex)
def set_alias(self, alias, **opts): def _match_alias(self, obj):
""" """
Set the criteria for the component's aliases. Match the alias criteria
Parameter: Parameter:
alias Name to match the component's aliases. obj An object with an alias generator method named "aliases"
Keyword Options:
regex If true, regular expression matching will be used.
Exceptions:
NameError Invalid keyword option.
""" """
self.alias = alias
for k in list(opts.keys()):
if k == "regex":
self.alias_regex = opts[k]
else:
raise NameError("Invalid alias option: {0}".format(k))
if not self.alias: if not self.alias:
self.alias_cmp = None # if there is no criteria, everything matches.
elif self.alias_regex: return True
self.alias_cmp = re.compile(self.alias)
else: return self._match_in_set(obj.aliases(), self.alias, self.alias_regex)
self.alias_cmp = self.alias
class MatchObjClass(object): class MatchObjClass(object):
"""Mixin for matching an object's class.""" """Mixin for matching an object's class."""
tclass = CriteriaSetDescriptor("tclass_regex", "lookup_class")
tclass_regex = False
def _match_object_class(self, obj): def _match_object_class(self, obj):
"""Match the object class criteria"""
if isinstance(self.tclass_cmp, set):
return obj in self.tclass_cmp
elif self.tclass_regex:
return bool(self.tclass_cmp.search(str(obj)))
else:
return obj == self.tclass_cmp
def set_tclass(self, tclass, **opts):
""" """
Set the object class(es) for the rule query. Match the object class criteria
Parameter: Parameter:
tclass The name of the object classes to match. obj An object with an object class attribute named "tclass"
This must be a string if regular expression
matching is used.
Keyword Options:
regex If true, use a regular expression for
matching the object class. If false, any
set intersection will match.
Exceptions:
NameError Invalid keyword option.
""" """
self.tclass = tclass
for k in list(opts.keys()):
if k == "regex":
self.tclass_regex = opts[k]
else:
raise NameError("Invalid object class option: {0}".format(k))
if not self.tclass: if not self.tclass:
self.tclass_cmp = None # if there is no criteria, everything matches.
return True
elif self.tclass_regex: elif self.tclass_regex:
self.tclass_cmp = re.compile(self.tclass) return bool(self.tclass.search(str(obj.tclass)))
elif isinstance(self.tclass, str):
self.tclass_cmp = self.policy.lookup_class(self.tclass)
else: else:
self.tclass_cmp = set(self.policy.lookup_class(c) for c in self.tclass) return obj.tclass in self.tclass
class MatchPermission(object): class MatchPermission(object):
"""Mixin for matching an object's permissions.""" """Mixin for matching an object's permissions."""
def _match_perms(self, obj): perms = CriteriaSetDescriptor("perms_regex")
"""Match the object to the permission criteria.""" perms_equal = False
return self._match_set(obj, self.perms_cmp, self.perms_equal) perms_regex = False
def set_perms(self, perms, **opts): def _match_perms(self, obj):
""" """
Set the permission set for the TE rule query. Match the permission criteria
Parameter: Parameter:
perms The permissions to match. obj An object with a permission set class attribute named "perms"
Options:
equal If true, the permission set of the rule
must equal the permissions criteria to
match. If false, any permission in the criteria
match. If false, any permission in the criteria
will cause a rule match.
Exceptions:
NameError Invalid permission set keyword option.
""" """
self.perms = perms
for k in list(opts.keys()):
if k == "equal":
self.perms_equal = opts[k]
else:
raise NameError("Invalid permission set option: {0}".format(k))
if not self.perms: if not self.perms:
self.perms_cmp = None # if there is no criteria, everything matches.
else: return True
self.perms_cmp = set(self.perms)
return self._match_regex_or_set(obj.perms, self.perms, self.perms_equal, self.perms_regex)
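
descriptors.py itself is not part of this hunk. Judging only from how the mixins and queries use it (a falsy value clears the criterion, a regex is compiled when the matching *_regex flag is set, and an optional policy lookup method is applied otherwise), CriteriaDescriptor behaves roughly like the approximation below. This is an illustration of the pattern, not the actual implementation; CriteriaSetDescriptor presumably does the same while storing a set of looked-up values.

import re
from weakref import WeakKeyDictionary


class CriteriaDescriptor(object):
    """Rough approximation of a per-instance query criterion descriptor."""

    def __init__(self, name_regex=None, lookup_function=None):
        self.name_regex = name_regex              # name of the owner's regex flag
        self.lookup_function = lookup_function    # name of a policy lookup method
        self.instances = WeakKeyDictionary()      # per-query-instance storage

    def __get__(self, obj, objtype=None):
        if obj is None:
            return self
        return self.instances.get(obj)            # None means "no criterion set"

    def __set__(self, obj, value):
        if not value:
            self.instances[obj] = None
        elif self.name_regex and getattr(obj, self.name_regex, False):
            self.instances[obj] = re.compile(value)
        elif self.lookup_function:
            self.instances[obj] = getattr(obj.policy, self.lookup_function)(value)
        else:
            self.instances[obj] = value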

View File

@ -18,53 +18,52 @@
# #
import logging import logging
from . import rulequery from . import mixins, query
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor, RuletypeDescriptor
class MLSRuleQuery(rulequery.RuleQuery): class MLSRuleQuery(mixins.MatchObjClass, query.PolicyQuery):
"""Query MLS rules.""" """
Query MLS rules.
def __init__(self, policy, Parameter:
ruletype=None, policy The policy to query.
source=None, source_regex=False,
target=None, target_regex=False,
tclass=None, tclass_regex=False,
default=None, default_overlap=False, default_subset=False,
default_superset=False, default_proper=False):
"""
Parameters:
policy The policy to query.
ruletype The rule type(s) to match.
source The name of the source type/attribute to match.
source_regex If true, regular expression matching will
be used on the source type/attribute.
target The name of the target type/attribute to match.
target_regex If true, regular expression matching will
be used on the target type/attribute.
tclass The object class(es) to match.
tclass_regex If true, use a regular expression for
matching the rule's object class.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
ruletype The list of rule type(s) to match.
source The name of the source type/attribute to match.
source_regex If true, regular expression matching will
be used on the source type/attribute.
target The name of the target type/attribute to match.
target_regex If true, regular expression matching will
be used on the target type/attribute.
tclass The object class(es) to match.
tclass_regex If true, use a regular expression for
matching the rule's object class.
"""
self.set_ruletype(ruletype) ruletype = RuletypeDescriptor("validate_mls_ruletype")
self.set_source(source, regex=source_regex) source = CriteriaDescriptor("source_regex", "lookup_type_or_attr")
self.set_target(target, regex=target_regex) source_regex = False
self.set_tclass(tclass, regex=tclass_regex) target = CriteriaDescriptor("target_regex", "lookup_type_or_attr")
self.set_default(default, overlap=default_overlap, subset=default_subset, target_regex = False
superset=default_superset, proper=default_proper) tclass = CriteriaSetDescriptor("tclass_regex", "lookup_class")
tclass_regex = False
default = CriteriaDescriptor(lookup_function="lookup_range")
default_overlap = False
default_subset = False
default_superset = False
default_proper = False
def results(self): def results(self):
"""Generator which yields all matching MLS rules.""" """Generator which yields all matching MLS rules."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Ruletypes: {0.ruletype}".format(self)) self.log.debug("Ruletypes: {0.ruletype}".format(self))
self.log.debug("Source: {0.source_cmp!r}, regex: {0.source_regex}".format(self)) self.log.debug("Source: {0.source!r}, regex: {0.source_regex}".format(self))
self.log.debug("Target: {0.target_cmp!r}, regex: {0.target_regex}".format(self)) self.log.debug("Target: {0.target!r}, regex: {0.target_regex}".format(self))
self.log.debug("Class: {0.tclass_cmp!r}, regex: {0.tclass_regex}".format(self)) self.log.debug("Class: {0.tclass!r}, regex: {0.tclass_regex}".format(self))
self.log.debug("Default: {0.default_cmp!r}, overlap: {0.default_overlap}, " self.log.debug("Default: {0.default!r}, overlap: {0.default_overlap}, "
"subset: {0.default_subset}, superset: {0.default_superset}, " "subset: {0.default_subset}, superset: {0.default_superset}, "
"proper: {0.default_proper}".format(self)) "proper: {0.default_proper}".format(self))
@ -81,7 +80,7 @@ class MLSRuleQuery(rulequery.RuleQuery):
# #
if self.source and not self._match_regex( if self.source and not self._match_regex(
rule.source, rule.source,
self.source_cmp, self.source,
self.source_regex): self.source_regex):
continue continue
@ -90,14 +89,14 @@ class MLSRuleQuery(rulequery.RuleQuery):
# #
if self.target and not self._match_regex( if self.target and not self._match_regex(
rule.target, rule.target,
self.target_cmp, self.target,
self.target_regex): self.target_regex):
continue continue
# #
# Matching on object class # Matching on object class
# #
if self.tclass and not self._match_object_class(rule.tclass): if not self._match_object_class(rule):
continue continue
# #
@ -105,7 +104,7 @@ class MLSRuleQuery(rulequery.RuleQuery):
# #
if self.default and not self._match_range( if self.default and not self._match_range(
rule.default, rule.default,
self.default_cmp, self.default,
self.default_subset, self.default_subset,
self.default_overlap, self.default_overlap,
self.default_superset, self.default_superset,
@ -114,55 +113,3 @@ class MLSRuleQuery(rulequery.RuleQuery):
# if we get here, we have matched all available criteria # if we get here, we have matched all available criteria
yield rule yield rule
def set_ruletype(self, ruletype):
"""
Set the rule types for the rule query.
Parameter:
ruletype The rule types to match.
"""
if ruletype:
self.policy.validate_mls_ruletype(ruletype)
self.ruletype = ruletype
def set_default(self, default, **opts):
"""
Set the criteria for matching the rule's default range.
Parameter:
default Criteria to match the rule's default range.
Keyword Parameters:
subset If true, the criteria will match if it is a subset
of the rule's default range.
overlap If true, the criteria will match if it overlaps
any of the rule's default range.
superset If true, the criteria will match if it is a superset
of the rule's default range.
proper If true, use proper superset/subset operations.
No effect if not using set operations.
Exceptions:
NameError Invalid keyword option.
"""
self.default = default
for k in list(opts.keys()):
if k == "subset":
self.default_subset = opts[k]
elif k == "overlap":
self.default_overlap = opts[k]
elif k == "superset":
self.default_superset = opts[k]
elif k == "proper":
self.default_proper = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
if not self.default:
self.default_cmp = None
else:
self.default_cmp = self.policy.lookup_range(self.default)
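
A hedged usage sketch of the reworked query. range_transition is the MLS rule type accepted by validate_mls_ruletype; the policy path and the source type name are hypothetical, and MLSRuleQuery is assumed to be exported at the package level like the other queries:

import setools

p = setools.SELinuxPolicy("policy.30")      # hypothetical path

q = setools.MLSRuleQuery(p,
                         ruletype=["range_transition"],
                         source="init_t",   # hypothetical type name
                         tclass=["process"])

for rule in q.results():
    print(rule.statement())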

View File

@ -24,79 +24,54 @@ from . import contextquery
class NetifconQuery(compquery.ComponentQuery, contextquery.ContextQuery): class NetifconQuery(compquery.ComponentQuery, contextquery.ContextQuery):
"""Network interface context query.""" """
Network interface context query.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
user=None, user_regex=False,
role=None, role_regex=False,
type_=None, type_regex=False,
range_=None, range_overlap=False, range_subset=False,
range_superset=False, range_proper=False):
"""
Parameters:
policy The policy to query.
user The criteria to match the context's user. Keyword Parameters/Class attributes:
user_regex If true, regular expression matching name The name of the network interface to match.
will be used on the user. name_regex If true, regular expression matching will
role The criteria to match the context's role. be used for matching the name.
role_regex If true, regular expression matching user The criteria to match the context's user.
will be used on the role. user_regex If true, regular expression matching
type_ The criteria to match the context's type. will be used on the user.
type_regex If true, regular expression matching role The criteria to match the context's role.
will be used on the type. role_regex If true, regular expression matching
range_ The criteria to match the context's range. will be used on the role.
range_subset If true, the criteria will match if it is a subset type_ The criteria to match the context's type.
of the context's range. type_regex If true, regular expression matching
range_overlap If true, the criteria will match if it overlaps will be used on the type.
any of the context's range. range_ The criteria to match the context's range.
range_superset If true, the criteria will match if it is a superset range_subset If true, the criteria will match if it is a subset
of the context's range. of the context's range.
range_proper If true, use proper superset/subset operations. range_overlap If true, the criteria will match if it overlaps
No effect if not using set operations. any of the context's range.
""" range_superset If true, the criteria will match if it is a superset
self.log = logging.getLogger(self.__class__.__name__) of the context's range.
range_proper If true, use proper superset/subset operations.
self.policy = policy No effect if not using set operations.
"""
self.set_name(name, regex=name_regex)
self.set_user(user, regex=user_regex)
self.set_role(role, regex=role_regex)
self.set_type(type_, regex=type_regex)
self.set_range(range_, overlap=range_overlap, subset=range_subset,
superset=range_superset, proper=range_proper)
def results(self): def results(self):
"""Generator which yields all matching netifcons.""" """Generator which yields all matching netifcons."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self)) self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self)) self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self)) self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, " self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
"superset: {0.range_superset}, proper: {0.range_proper}".format(self)) "superset: {0.range_superset}, proper: {0.range_proper}".format(self))
for netif in self.policy.netifcons(): for netif in self.policy.netifcons():
if self.name and not self._match_regex( if self.name and not self._match_regex(
netif.netif, netif.netif,
self.name_cmp, self.name,
self.name_regex): self.name_regex):
continue continue
if not self._match_context( if not self._match_context(netif.context):
netif.context,
self.user_cmp,
self.user_regex,
self.role_cmp,
self.role_regex,
self.type_cmp,
self.type_regex,
self.range_cmp,
self.range_subset,
self.range_overlap,
self.range_superset,
self.range_proper):
continue continue
yield netif yield netif

View File

@ -29,62 +29,82 @@ from . import contextquery
class NodeconQuery(contextquery.ContextQuery): class NodeconQuery(contextquery.ContextQuery):
"""Query nodecon statements.""" """
Query nodecon statements.
def __init__(self, policy, Parameter:
network=None, network_overlap=False, policy The policy to query.
ip_version=None,
user=None, user_regex=False,
role=None, role_regex=False,
type_=None, type_regex=False,
range_=None, range_overlap=False, range_subset=False,
range_superset=False, range_proper=False):
"""
Parameters:
policy The policy to query.
network The network address. Keyword Parameters/Class attributes:
network_overlap If true, the net will match if it overlaps with network The IPv4/IPv6 address or IPv4/IPv6 network address
the nodecon's network instead of equality. with netmask, e.g. 192.168.1.0/255.255.255.0 or
user The criteria to match the context's user. "192.168.1.0/24".
user_regex If true, regular expression matching network_overlap If true, the net will match if it overlaps with
will be used on the user. the nodecon's network instead of equality.
role The criteria to match the context's role. ip_version The IP version of the nodecon to match. (socket.AF_INET
role_regex If true, regular expression matching for IPv4 or socket.AF_INET6 for IPv6)
will be used on the role. user The criteria to match the context's user.
type_ The criteria to match the context's type. user_regex If true, regular expression matching
type_regex If true, regular expression matching will be used on the user.
will be used on the type. role The criteria to match the context's role.
range_ The criteria to match the context's range. role_regex If true, regular expression matching
range_subset If true, the criteria will match if it is a subset will be used on the role.
of the context's range. type_ The criteria to match the context's type.
range_overlap If true, the criteria will match if it overlaps type_regex If true, regular expression matching
any of the context's range. will be used on the type.
range_superset If true, the criteria will match if it is a superset range_ The criteria to match the context's range.
of the context's range. range_subset If true, the criteria will match if it is a subset
range_proper If true, use proper superset/subset operations. of the context's range.
No effect if not using set operations. range_overlap If true, the criteria will match if it overlaps
""" any of the context's range.
self.log = logging.getLogger(self.__class__.__name__) range_superset If true, the criteria will match if it is a superset
of the context's range.
range_proper If true, use proper superset/subset operations.
No effect if not using set operations.
"""
self.policy = policy _network = None
network_overlap = False
_ip_version = None
self.set_network(network, overlap=network_overlap) @property
self.set_ip_version(ip_version) def ip_version(self):
self.set_user(user, regex=user_regex) return self._ip_version
self.set_role(role, regex=role_regex)
self.set_type(type_, regex=type_regex) @ip_version.setter
self.set_range(range_, overlap=range_overlap, subset=range_subset, def ip_version(self, value):
superset=range_superset, proper=range_proper) if value:
if not (value == AF_INET or value == AF_INET6):
raise ValueError(
"The address family must be {0} for IPv4 or {1} for IPv6.".
format(AF_INET, AF_INET6))
self._ip_version = value
else:
self._ip_version = None
@property
def network(self):
return self._network
@network.setter
def network(self, value):
if value:
try:
self._network = ipaddress.ip_network(value)
except NameError: # pragma: no cover
raise RuntimeError("Nodecon IP address/network functions require Python 3.3+.")
else:
self._network = None
def results(self): def results(self):
"""Generator which yields all matching nodecons.""" """Generator which yields all matching nodecons."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Network: {0.network!r}, overlap: {0.network_overlap}".format(self)) self.log.debug("Network: {0.network!r}, overlap: {0.network_overlap}".format(self))
self.log.debug("Ver: {0.version}".format(self)) self.log.debug("IP Version: {0.ip_version}".format(self))
self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self)) self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self)) self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self)) self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, " self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
"superset: {0.range_superset}, proper: {0.range_proper}".format(self)) "superset: {0.range_superset}, proper: {0.range_proper}".format(self))
@ -119,77 +139,10 @@ class NodeconQuery(contextquery.ContextQuery):
if not net == self.network: if not net == self.network:
continue continue
if self.version and self.version != nodecon.ip_version: if self.ip_version and self.ip_version != nodecon.ip_version:
continue continue
if not self._match_context( if not self._match_context(nodecon.context):
nodecon.context,
self.user_cmp,
self.user_regex,
self.role_cmp,
self.role_regex,
self.type_cmp,
self.type_regex,
self.range_cmp,
self.range_subset,
self.range_overlap,
self.range_superset,
self.range_proper):
continue continue
yield nodecon yield nodecon
def set_network(self, net, **opts):
"""
Set the criteria for matching the network.
Parameter:
net String IPv4/IPv6 address or IPv4/IPv6 network address
with netmask, e.g. 192.168.1.0/255.255.255.0 or
"192.168.1.0/24".
Keyword parameters:
overlap If true, the criteria will match if it overlaps with the
nodecon's network instead of equality.
Exceptions:
NameError Invalid keyword parameter.
"""
if net:
try:
self.network = ipaddress.ip_network(net)
except NameError: # pragma: no cover
raise RuntimeError("Nodecon IP address/network functions require Python 3.3+.")
else:
# ensure self.network is set
self.network = None
for k in list(opts.keys()):
if k == "overlap":
self.network_overlap = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
def set_ip_version(self, version):
"""
Set the criteria for matching the IP version.
Parameter:
version The address family to match. (socket.AF_INET for
IPv4 or socket.AF_INET6 for IPv6)
Exceptions:
ValueError Invalid address family number.
"""
if version:
if not (version == AF_INET or version == AF_INET6):
raise ValueError(
"The address family must be {0} for IPv4 or {1} for IPv6.".
format(AF_INET, AF_INET6))
self.version = version
else:
self.version = None
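
Both new setters validate at assignment time: ipaddress.ip_network() (Python 3.3+) parses and checks the network criterion, and the address family must be AF_INET or AF_INET6. A standalone sketch of just these two properties, runnable without a policy:

import ipaddress
from socket import AF_INET, AF_INET6


class NodeconCriteria(object):
    """Standalone sketch of the network/ip_version validation shown above."""

    _network = None
    _ip_version = None

    @property
    def network(self):
        return self._network

    @network.setter
    def network(self, value):
        # ip_network() raises ValueError for malformed addresses/netmasks
        self._network = ipaddress.ip_network(value) if value else None

    @property
    def ip_version(self):
        return self._ip_version

    @ip_version.setter
    def ip_version(self, value):
        if value:
            if value not in (AF_INET, AF_INET6):
                raise ValueError("The address family must be {0} for IPv4 or "
                                 "{1} for IPv6.".format(AF_INET, AF_INET6))
            self._ip_version = value
        else:
            self._ip_version = None


c = NodeconCriteria()
c.network = "192.168.1.0/24"
c.ip_version = AF_INET
print(c.network, c.ip_version)      # 192.168.1.0/24 and the AF_INET constant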

View File

@ -20,63 +20,63 @@ import logging
import re import re
from . import compquery from . import compquery
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor
from .policyrep.exception import NoCommon from .policyrep.exception import NoCommon
class ObjClassQuery(compquery.ComponentQuery): class ObjClassQuery(compquery.ComponentQuery):
"""Query object classes.""" """
Query object classes.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
common=None, common_regex=False,
perms=None, perms_equal=False, perms_regex=False,
perms_indirect=True):
"""
Parameters:
name The name of the object set to match.
name_regex If true, regular expression matching will
be used for matching the name.
common The name of the inherited common to match.
common_regex If true, regular expression matching will
be used for matching the common name.
perms The permissions to match.
perms_equal If true, only commons with permission sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
perms_regex If true, regular expression matching
will be used on the permission names instead
of set logic; set comparison will not be used.
perms_indirect If false, permissions inherited from a common
permission set will not be evaluated. Default
is true.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The name of the object set to match.
self.set_common(common, regex=common_regex) name_regex If true, regular expression matching will
self.set_perms(perms, regex=perms_regex, equal=perms_equal, indirect=perms_indirect) be used for matching the name.
common The name of the inherited common to match.
common_regex If true, regular expression matching will
be used for matching the common name.
perms The permissions to match.
perms_equal If true, only commons with permission sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
perms_regex If true, regular expression matching
will be used on the permission names instead
of set logic; set comparison will not be used.
perms_indirect If false, permissions inherited from a common
permission set will not be evaluated. Default
is true.
"""
common = CriteriaDescriptor("common_regex", "lookup_common")
common_regex = False
perms = CriteriaSetDescriptor("perms_regex")
perms_equal = False
perms_indirect = True
perms_regex = False
def results(self): def results(self):
"""Generator which yields all matching object classes.""" """Generator which yields all matching object classes."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Common: {0.common_cmp!r}, regex: {0.common_regex}".format(self)) self.log.debug("Common: {0.common!r}, regex: {0.common_regex}".format(self))
self.log.debug("Perms: {0.perms_cmp}, regex: {0.perms_regex}, " self.log.debug("Perms: {0.perms}, regex: {0.perms_regex}, "
"eq: {0.perms_equal}, indirect: {0.perms_indirect}".format(self)) "eq: {0.perms_equal}, indirect: {0.perms_indirect}".format(self))
for class_ in self.policy.classes(): for class_ in self.policy.classes():
if self.name and not self._match_name(class_): if not self._match_name(class_):
continue continue
if self.common: if self.common:
try: try:
if not self._match_regex( if not self._match_regex(
class_.common, class_.common,
self.common_cmp, self.common,
self.common_regex): self.common_regex):
continue continue
except NoCommon: except NoCommon:
@ -93,75 +93,9 @@ class ObjClassQuery(compquery.ComponentQuery):
if not self._match_regex_or_set( if not self._match_regex_or_set(
perms, perms,
self.perms_cmp, self.perms,
self.perms_equal, self.perms_equal,
self.perms_regex): self.perms_regex):
continue continue
yield class_ yield class_
def set_common(self, common, **opts):
"""
Set the criteria for matching the common's name.
Parameter:
name Name to match the common's name.
regex If true, regular expression matching will be used.
Exceptions:
NameError Invalid keyword option.
"""
self.common = common
for k in list(opts.keys()):
if k == "regex":
self.common_regex = opts[k]
else:
raise NameError("Invalid common option: {0}".format(k))
if not self.common:
self.common_cmp = None
elif self.common_regex:
self.common_cmp = re.compile(self.common)
else:
self.common_cmp = self.policy.lookup_common(self.common)
def set_perms(self, perms, **opts):
"""
Set the criteria for the common's permissions.
Parameter:
perms Name to match the common's permissions.
Keyword Options:
regex If true, regular expression matching will be used.
equal If true, the permission set of the common
must equal the permissions criteria to
match. If false, any intersection in the
criteria will cause a match.
indirect If true, the permissions inherited from a common
permission set will be included.
Exceptions:
NameError Invalid keyword option.
"""
self.perms = perms
for k in list(opts.keys()):
if k == "regex":
self.perms_regex = opts[k]
elif k == "equal":
self.perms_equal = opts[k]
elif k == "indirect":
self.perms_indirect = opts[k]
else:
raise NameError("Invalid permissions option: {0}".format(k))
if not self.perms:
self.perms_cmp = None
elif self.perms_regex:
self.perms_cmp = re.compile(self.perms)
else:
self.perms_cmp = self.perms
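
A hedged usage sketch of ObjClassQuery with the permission criteria now held directly in perms (hypothetical policy path):

import setools

p = setools.SELinuxPolicy("policy.30")      # hypothetical path

# match classes whose permission set intersects {"read", "write"};
# perms_indirect defaults to True, so permissions inherited from a
# common are considered as well
q = setools.ObjClassQuery(p, perms=["read", "write"])

for class_ in q.results():
    print(class_.statement())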

View File

@ -23,28 +23,25 @@ from . import compquery
class PolCapQuery(compquery.ComponentQuery): class PolCapQuery(compquery.ComponentQuery):
"""Query SELinux policy capabilities""" """
Query SELinux policy capabilities
def __init__(self, policy, Parameter:
name=None, name_regex=False): policy The policy to query.
"""
Parameters:
name The name of the policy capability to match.
name_regex If true, regular expression matching will
be used for matching the name.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The name of the policy capability to match.
name_regex If true, regular expression matching will
be used for matching the name.
"""
def results(self): def results(self):
"""Generator which yields all matching policy capabilities.""" """Generator which yields all matching policy capabilities."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
for cap in self.policy.polcaps(): for cap in self.policy.polcaps():
if self.name and not self._match_name(cap): if not self._match_name(cap):
continue continue
yield cap yield cap

View File

@ -25,179 +25,122 @@ from .policyrep.netcontext import port_range
class PortconQuery(contextquery.ContextQuery): class PortconQuery(contextquery.ContextQuery):
"""Port context query.""" """
Port context query.
def __init__(self, policy, Parameter:
protocol=None, policy The policy to query.
ports=port_range(None, None), ports_subset=False, ports_overlap=False,
ports_superset=False, ports_proper=False,
user=None, user_regex=False,
role=None, role_regex=False,
type_=None, type_regex=False,
range_=None, range_overlap=False, range_subset=False,
range_superset=False, range_proper=False):
"""
Parameters:
policy The policy to query.
Keyword Parameters: Keyword Parameters/Class attributes:
protocol The protocol to match (socket.IPPROTO_TCP for protocol The protocol to match (socket.IPPROTO_TCP for
TCP or socket.IPPROTO_UDP for UDP) TCP or socket.IPPROTO_UDP for UDP)
ports A 2-tuple of the port range to match. (Set both to ports A 2-tuple of the port range to match. (Set both to
the same value for a single port) the same value for a single port)
ports_subset If true, the criteria will match if it is a subset ports_subset If true, the criteria will match if it is a subset
of the portcon's range. of the portcon's range.
ports_overlap If true, the criteria will match if it overlaps ports_overlap If true, the criteria will match if it overlaps
any of the portcon's range. any of the portcon's range.
ports_superset If true, the criteria will match if it is a superset ports_superset If true, the criteria will match if it is a superset
of the portcon's range. of the portcon's range.
ports_proper If true, use proper superset/subset operations. ports_proper If true, use proper superset/subset operations.
No effect if not using set operations. No effect if not using set operations.
user The criteria to match the context's user. user The criteria to match the context's user.
user_regex If true, regular expression matching user_regex If true, regular expression matching
will be used on the user. will be used on the user.
role The criteria to match the context's role. role The criteria to match the context's role.
role_regex If true, regular expression matching role_regex If true, regular expression matching
will be used on the role. will be used on the role.
type_ The criteria to match the context's type. type_ The criteria to match the context's type.
type_regex If true, regular expression matching type_regex If true, regular expression matching
will be used on the type. will be used on the type.
range_ The criteria to match the context's range. range_ The criteria to match the context's range.
range_subset If true, the criteria will match if it is a subset range_subset If true, the criteria will match if it is a subset
of the context's range. of the context's range.
range_overlap If true, the criteria will match if it overlaps range_overlap If true, the criteria will match if it overlaps
any of the context's range. any of the context's range.
range_superset If true, the criteria will match if it is a superset range_superset If true, the criteria will match if it is a superset
of the context's range. of the context's range.
range_proper If true, use proper superset/subset operations. range_proper If true, use proper superset/subset operations.
No effect if not using set operations. No effect if not using set operations.
""" """
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy _protocol = None
_ports = None
ports_subset = False
ports_overlap = False
ports_superset = False
ports_proper = False
self.set_protocol(protocol) @property
self.set_ports(ports, subset=ports_subset, overlap=ports_overlap, def ports(self):
superset=ports_superset, proper=ports_proper) return self._ports
self.set_user(user, regex=user_regex)
self.set_role(role, regex=role_regex) @ports.setter
self.set_type(type_, regex=type_regex) def ports(self, value):
self.set_range(range_, overlap=range_overlap, subset=range_subset, pending_ports = port_range(*value)
superset=range_superset, proper=range_proper)
if all(pending_ports):
if pending_ports.low < 1 or pending_ports.high < 1:
raise ValueError("Port numbers must be positive: {0.low}-{0.high}".
format(pending_ports))
if pending_ports.low > pending_ports.high:
raise ValueError(
"The low port must be smaller than the high port: {0.low}-{0.high}".
format(pending_ports))
self._ports = pending_ports
else:
self._ports = None
@property
def protocol(self):
return self._protocol
@protocol.setter
def protocol(self, value):
if value:
if not (value == IPPROTO_TCP or value == IPPROTO_UDP):
raise ValueError(
"The protocol must be {0} for TCP or {1} for UDP.".
format(IPPROTO_TCP, IPPROTO_UDP))
self._protocol = value
else:
self._protocol = None
def results(self): def results(self):
"""Generator which yields all matching portcons.""" """Generator which yields all matching portcons."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Ports: {0.ports_cmp}, overlap: {0.ports_overlap}, " self.log.debug("Ports: {0.ports}, overlap: {0.ports_overlap}, "
"subset: {0.ports_subset}, superset: {0.ports_superset}, " "subset: {0.ports_subset}, superset: {0.ports_superset}, "
"proper: {0.ports_proper}".format(self)) "proper: {0.ports_proper}".format(self))
self.log.debug("User: {0.user_cmp!r}, regex: {0.user_regex}".format(self)) self.log.debug("User: {0.user!r}, regex: {0.user_regex}".format(self))
self.log.debug("Role: {0.role_cmp!r}, regex: {0.role_regex}".format(self)) self.log.debug("Role: {0.role!r}, regex: {0.role_regex}".format(self))
self.log.debug("Type: {0.type_cmp!r}, regex: {0.type_regex}".format(self)) self.log.debug("Type: {0.type_!r}, regex: {0.type_regex}".format(self))
self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, " self.log.debug("Range: {0.range_!r}, subset: {0.range_subset}, overlap: {0.range_overlap}, "
"superset: {0.range_superset}, proper: {0.range_proper}".format(self)) "superset: {0.range_superset}, proper: {0.range_proper}".format(self))
for portcon in self.policy.portcons(): for portcon in self.policy.portcons():
if all(self.ports): if self.ports and not self._match_range(
if not self._match_range( portcon.ports,
portcon.ports, self.ports,
self.ports_cmp, self.ports_subset,
self.ports_subset, self.ports_overlap,
self.ports_overlap, self.ports_superset,
self.ports_superset, self.ports_proper):
self.ports_proper): continue
continue
if self.protocol and self.protocol != portcon.protocol: if self.protocol and self.protocol != portcon.protocol:
continue continue
if not self._match_context( if not self._match_context(portcon.context):
portcon.context,
self.user_cmp,
self.user_regex,
self.role_cmp,
self.role_regex,
self.type_cmp,
self.type_regex,
self.range_cmp,
self.range_subset,
self.range_overlap,
self.range_superset,
self.range_proper):
continue continue
yield portcon yield portcon
def set_ports(self, ports, **opts):
"""
Set the criteria for matching the port range.
Parameter:
ports A 2-tuple of the port range to match. (Set both to
the same value to match a single port)
Keyword Parameters:
subset If true, the criteria will match if it is a subset
of the portcon's range.
overlap If true, the criteria will match if it overlaps
any of the portcon's range.
superset If true, the criteria will match if it is a superset
of the portcon's range.
proper If true, use proper superset/subset operations.
No effect if not using set operations.
"""
self.ports = ports
for k in list(opts.keys()):
if k == "subset":
self.ports_subset = opts[k]
elif k == "overlap":
self.ports_overlap = opts[k]
elif k == "superset":
self.ports_superset = opts[k]
elif k == "proper":
self.ports_proper = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
if not all(self.ports):
self.ports_cmp = None
else:
if self.ports[0] < 1 or self.ports[1] < 1:
raise ValueError("Port numbers must be positive: {0[0]}-{0[1]}".format(ports))
if self.ports[0] > self.ports[1]:
raise ValueError(
"The low port must be smaller than the high port: {0[0]}-{0[1]}".format(ports))
self.ports_cmp = port_range(*ports)
def set_protocol(self, protocol):
"""
Set the criteria for matching the IP protocol.
Parameter:
protocol The protocol number to match. (socket.IPPROTO_TCP for
TCP or socket.IPPROTO_UDP for UDP)
Exceptions:
ValueError Invalid protocol number.
"""
if protocol:
if not (protocol == IPPROTO_TCP or protocol == IPPROTO_UDP):
raise ValueError(
"The protocol must be {0} for TCP or {1} for UDP.".
format(IPPROTO_TCP, IPPROTO_UDP))
self.protocol = protocol
else:
self.protocol = None
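
The ports setter normalizes its value through port_range and rejects non-positive or inverted ranges at assignment time. An approximate standalone re-statement of that check (port_range is the namedtuple from policyrep.netcontext; re-declared here so the sketch runs on its own):

from collections import namedtuple

port_range = namedtuple("port_range", ["low", "high"])     # mirrors policyrep.netcontext


def validate_ports(value):
    """Approximate re-statement of the PortconQuery.ports setter logic."""
    pending = port_range(*value)
    if all(pending):
        if pending.low < 1 or pending.high < 1:
            raise ValueError("Port numbers must be positive: {0.low}-{0.high}".format(pending))
        if pending.low > pending.high:
            raise ValueError("The low port must be smaller than the high port: "
                             "{0.low}-{0.high}".format(pending))
        return pending
    return None


print(validate_ports((80, 80)))        # port_range(low=80, high=80): a single port
print(validate_ports((None, None)))    # None: no criterion set
validate_ports((8080, 80))             # raises ValueError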

View File

@ -16,11 +16,29 @@
# License along with SETools. If not, see # License along with SETools. If not, see
# <http://www.gnu.org/licenses/>. # <http://www.gnu.org/licenses/>.
# #
import logging
class PolicyQuery(object): class PolicyQuery(object):
"""Abstract base class for SELinux policy queries.""" """Base class for SELinux policy queries."""
def __init__(self, policy, **kwargs):
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy
# keys are sorted in reverse order so regex settings
# are set before the criteria, e.g. name_regex
# is set before name. This ensures correct behavior
# since the criteria descriptors are sensitive to
# regex settings.
for name in sorted(kwargs.keys(), reverse=True):
attr = getattr(self, name, None) # None is not callable
if callable(attr):
raise ValueError("Keyword parameter {0} conflicts with a callable.".format(name))
setattr(self, name, kwargs[name])
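
The reverse sort matters because a criteria descriptor decides between re.compile() and a policy lookup at assignment time, based on the current value of its *_regex flag. A tiny illustration of the ordering (hypothetical criterion names):

# reverse-sorted keyword order guarantees, for example:
#   name_regex  -> set first  (plain boolean attribute)
#   name        -> set second (the descriptor now sees name_regex == True
#                              and compiles the value instead of looking it up)
kwargs = {"name": "^init", "name_regex": True}
print(sorted(kwargs.keys(), reverse=True))   # ['name_regex', 'name']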
@staticmethod @staticmethod
def _match_regex(obj, criteria, regex): def _match_regex(obj, criteria, regex):
@ -72,6 +90,24 @@ class PolicyQuery(object):
else: else:
return criteria in obj return criteria in obj
@staticmethod
def _match_indirect_regex(obj, criteria, indirect, regex):
"""
Match the object with optional regular expression and indirection.
Parameters:
obj The object to match.
criteria The criteria to match.
regex If regular expression matching should be used.
indirect If object indirection should be used, e.g.
expanding an attribute.
"""
if indirect:
return PolicyQuery._match_in_set((obj.expand()), criteria, regex)
else:
return PolicyQuery._match_regex(obj, criteria, regex)
@staticmethod @staticmethod
def _match_regex_or_set(obj, criteria, equal, regex): def _match_regex_or_set(obj, criteria, equal, regex):
""" """

View File

@ -19,64 +19,79 @@
import logging import logging
import re import re
from . import mixins, query
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor, RuletypeDescriptor
from .policyrep.exception import InvalidType, RuleUseError from .policyrep.exception import InvalidType, RuleUseError
from . import rulequery
class RBACRuleQuery(mixins.MatchObjClass, query.PolicyQuery):
class RBACRuleQuery(rulequery.RuleQuery): """
Query the RBAC rules.
"""Query the RBAC rules.""" Parameter:
policy The policy to query.
def __init__(self, policy, Keyword Parameters/Class attributes:
ruletype=None, ruletype The list of rule type(s) to match.
source=None, source_regex=False, source_indirect=True, source The name of the source role/attribute to match.
target=None, target_regex=False, target_indirect=True, source_indirect If true, members of an attribute will be
tclass=None, tclass_regex=False, matched rather than the attribute itself.
default=None, default_regex=False): source_regex If true, regular expression matching will
""" be used on the source role/attribute.
Parameters: Obeys the source_indirect option.
policy The policy to query. target The name of the target role/attribute to match.
ruletype The rule type(s) to match. target_indirect If true, members of an attribute will be
source The name of the source role/attribute to match. matched rather than the attribute itself.
source_indirect If true, members of an attribute will be target_regex If true, regular expression matching will
matched rather than the attribute itself. be used on the target role/attribute.
source_regex If true, regular expression matching will Obeys target_indirect option.
be used on the source role/attribute. tclass The object class(es) to match.
Obeys the source_indirect option. tclass_regex If true, use a regular expression for
target The name of the target role/attribute to match. matching the rule's object class.
target_indirect If true, members of an attribute will be default The name of the default role to match.
matched rather than the attribute itself. default_regex If true, regular expression matching will
target_regex If true, regular expression matching will be used on the default role.
be used on the target role/attribute. """
Obeys target_indirect option.
tclass The object class(es) to match.
tclass_regex If true, use a regular expression for
matching the rule's object class.
default The name of the default role to match.
default_regex If true, regular expression matching will
be used on the default role.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy ruletype = RuletypeDescriptor("validate_rbac_ruletype")
source = CriteriaDescriptor("source_regex", "lookup_role")
source_regex = False
source_indirect = True
_target = None
target_regex = False
target_indirect = True
tclass = CriteriaSetDescriptor("tclass_regex", "lookup_class")
tclass_regex = False
default = CriteriaDescriptor("default_regex", "lookup_role")
default_regex = False
self.set_ruletype(ruletype) @property
self.set_source(source, indirect=source_indirect, regex=source_regex) def target(self):
self.set_target(target, indirect=target_indirect, regex=target_regex) return self._target
self.set_tclass(tclass, regex=tclass_regex)
self.set_default(default, regex=default_regex) @target.setter
def target(self, value):
if not value:
self._target = None
elif self.target_regex:
self._target = re.compile(value)
else:
try:
self._target = self.policy.lookup_type_or_attr(value)
except InvalidType:
self._target = self.policy.lookup_role(value)
def results(self): def results(self):
"""Generator which yields all matching RBAC rules.""" """Generator which yields all matching RBAC rules."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Ruletypes: {0.ruletype}".format(self)) self.log.debug("Ruletypes: {0.ruletype}".format(self))
self.log.debug("Source: {0.source_cmp!r}, indirect: {0.source_indirect}, " self.log.debug("Source: {0.source!r}, indirect: {0.source_indirect}, "
"regex: {0.source_regex}".format(self)) "regex: {0.source_regex}".format(self))
self.log.debug("Target: {0.target_cmp!r}, indirect: {0.target_indirect}, " self.log.debug("Target: {0.target!r}, indirect: {0.target_indirect}, "
"regex: {0.target_regex}".format(self)) "regex: {0.target_regex}".format(self))
self.log.debug("Class: {0.tclass_cmp!r}, regex: {0.tclass_regex}".format(self)) self.log.debug("Class: {0.tclass!r}, regex: {0.tclass_regex}".format(self))
self.log.debug("Default: {0.default_cmp!r}, regex: {0.default_regex}".format(self)) self.log.debug("Default: {0.default!r}, regex: {0.default_regex}".format(self))
for rule in self.policy.rbacrules(): for rule in self.policy.rbacrules():
# #
@ -91,7 +106,7 @@ class RBACRuleQuery(rulequery.RuleQuery):
# #
if self.source and not self._match_indirect_regex( if self.source and not self._match_indirect_regex(
rule.source, rule.source,
self.source_cmp, self.source,
self.source_indirect, self.source_indirect,
self.source_regex): self.source_regex):
continue continue
@ -101,7 +116,7 @@ class RBACRuleQuery(rulequery.RuleQuery):
# #
if self.target and not self._match_indirect_regex( if self.target and not self._match_indirect_regex(
rule.target, rule.target,
self.target_cmp, self.target,
self.target_indirect, self.target_indirect,
self.target_regex): self.target_regex):
continue continue
@ -109,12 +124,11 @@ class RBACRuleQuery(rulequery.RuleQuery):
# #
# Matching on object class # Matching on object class
# #
if self.tclass: try:
try: if not self._match_object_class(rule):
if not self._match_object_class(rule.tclass):
continue
except RuleUseError:
continue continue
except RuleUseError:
continue
# #
# Matching on default role # Matching on default role
@ -123,7 +137,7 @@ class RBACRuleQuery(rulequery.RuleQuery):
try: try:
if not self._match_regex( if not self._match_regex(
rule.default, rule.default,
self.default_cmp, self.default,
self.default_regex): self.default_regex):
continue continue
except RuleUseError: except RuleUseError:
@ -131,115 +145,3 @@ class RBACRuleQuery(rulequery.RuleQuery):
# if we get here, we have matched all available criteria # if we get here, we have matched all available criteria
yield rule yield rule
def set_ruletype(self, ruletype):
"""
Set the rule types for the rule query.
Parameter:
ruletype The rule types to match.
"""
if ruletype:
self.policy.validate_rbac_ruletype(ruletype)
self.ruletype = ruletype
def set_source(self, source, **opts):
"""
Set the criteria for the rule's source.
Parameter:
source Name to match the rule's source.
Keyword Options:
indirect If true, members of an attribute will be
matched rather than the attribute itself.
regex If true, regular expression matching will
be used. Obeys the indirect option.
Exceptions:
NameError Invalid keyword option.
"""
self.source = source
for k in list(opts.keys()):
if k == "indirect":
self.source_indirect = opts[k]
elif k == "regex":
self.source_regex = opts[k]
else:
raise NameError("Invalid source option: {0}".format(k))
if not self.source:
self.source_cmp = None
elif self.source_regex:
self.source_cmp = re.compile(self.source)
else:
self.source_cmp = self.policy.lookup_role(self.source)
def set_target(self, target, **opts):
"""
Set the criteria for the rule's target.
Parameter:
target Name to match the rule's target.
Keyword Options:
indirect If true, members of an attribute will be
matched rather than the attribute itself.
regex If true, regular expression matching will
be used. Obeys the indirect option.
Exceptions:
NameError Invalid keyword option.
"""
self.target = target
for k in list(opts.keys()):
if k == "indirect":
self.target_indirect = opts[k]
elif k == "regex":
self.target_regex = opts[k]
else:
raise NameError("Invalid target option: {0}".format(k))
if not self.target:
self.target_cmp = None
elif self.target_regex:
self.target_cmp = re.compile(self.target)
else:
try:
self.target_cmp = self.policy.lookup_type_or_attr(self.target)
except InvalidType:
self.target_cmp = self.policy.lookup_role(self.target)
def set_default(self, default, **opts):
"""
Set the criteria for the rule's default role.
Parameter:
default Name to match the rule's default role.
Keyword Options:
regex If true, regular expression matching will be used.
Exceptions:
NameError Invalid keyword option.
"""
self.default = default
for k in list(opts.keys()):
if k == "regex":
self.default_regex = opts[k]
else:
raise NameError("Invalid default option: {0}".format(k))
if not self.default:
self.default_cmp = None
elif self.default_regex:
self.default_cmp = re.compile(self.default)
else:
self.default_cmp = self.policy.lookup_role(self.default)
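
A hedged usage sketch of the reworked RBACRuleQuery; allow and role_transition are the RBAC rule types accepted by validate_rbac_ruletype, while the policy path and role name are hypothetical:

import setools

p = setools.SELinuxPolicy("policy.30")              # hypothetical path

q = setools.RBACRuleQuery(p,
                          ruletype=["allow", "role_transition"],
                          source="staff_r")          # hypothetical role name

for rule in q.results():
    print(rule.statement())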

View File

@ -20,41 +20,40 @@ import logging
import re import re
from . import compquery from . import compquery
from .descriptors import CriteriaSetDescriptor
class RoleQuery(compquery.ComponentQuery): class RoleQuery(compquery.ComponentQuery):
"""Query SELinux policy roles.""" """
Query SELinux policy roles.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
types=None, types_equal=False, types_regex=False):
"""
Parameter:
policy The policy to query.
name The role name to match.
name_regex If true, regular expression matching
will be used on the role names.
types The type to match.
types_equal If true, only roles with type sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
types_regex If true, regular expression matching
will be used on the type names instead
of set logic.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The role name to match.
self.set_types(types, regex=types_regex, equal=types_equal) name_regex If true, regular expression matching
will be used on the role names.
types The type to match.
types_equal If true, only roles with type sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
types_regex If true, regular expression matching
will be used on the type names instead
of set logic.
"""
types = CriteriaSetDescriptor("types_regex", "lookup_type")
types_equal = False
types_regex = False
def results(self): def results(self):
"""Generator which yields all matching roles.""" """Generator which yields all matching roles."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Types: {0.types_cmp!r}, regex: {0.types_regex}, " self.log.debug("Types: {0.types!r}, regex: {0.types_regex}, "
"eq: {0.types_equal}".format(self)) "eq: {0.types_equal}".format(self))
for r in self.policy.roles(): for r in self.policy.roles():
@ -65,50 +64,14 @@ class RoleQuery(compquery.ComponentQuery):
# will confuse, especially for set equality type queries. # will confuse, especially for set equality type queries.
continue continue
if self.name and not self._match_name(r): if not self._match_name(r):
continue continue
if self.types and not self._match_regex_or_set( if self.types and not self._match_regex_or_set(
set(r.types()), set(r.types()),
self.types_cmp, self.types,
self.types_equal, self.types_equal,
self.types_regex): self.types_regex):
continue continue
yield r yield r
def set_types(self, types, **opts):
"""
Set the criteria for the role's types.
Parameter:
types Name to match the role's types.
Keyword Options:
regex If true, regular expression matching will be used
instead of set logic.
equal If true, the type set of the role
must equal the types criteria to
match. If false, any intersection in the
criteria will cause a rule match.
Exceptions:
NameError Invalid keyword option.
"""
self.types = types
for k in list(opts.keys()):
if k == "regex":
self.types_regex = opts[k]
elif k == "equal":
self.types_equal = opts[k]
else:
raise NameError("Invalid types option: {0}".format(k))
if not self.types:
self.types_cmp = None
elif self.types_regex:
self.types_cmp = re.compile(self.types)
else:
self.types_cmp = set(self.policy.lookup_type(t) for t in self.types)
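The descriptors module itself is not shown in this hunk, but the call site above, CriteriaSetDescriptor("types_regex", "lookup_type"), implies its rough shape: the criteria value is stored per query instance, compiled as a regular expression when the named flag is true, and otherwise resolved through the named policy lookup method. A minimal sketch under those assumptions, not the actual implementation:

    import re
    from weakref import WeakKeyDictionary

    class CriteriaDescriptor(object):
        """Single criterion: None, a compiled regex, or a looked-up policy object."""

        def __init__(self, name_regex=None, lookup_function=None):
            self.regex = name_regex        # name of the owner's regex-flag attribute
            self.lookup = lookup_function  # name of the policy lookup method
            self.instances = WeakKeyDictionary()

        def __get__(self, obj, objtype=None):
            if obj is None:
                return self
            return self.instances.setdefault(obj, None)

        def __set__(self, obj, value):
            if not value:
                self.instances[obj] = None
            elif self.regex and getattr(obj, self.regex, False):
                self.instances[obj] = re.compile(value)
            else:
                self.instances[obj] = getattr(obj.policy, self.lookup)(value)

    class CriteriaSetDescriptor(CriteriaDescriptor):
        """Set criterion: each member is resolved, e.g. a list of type names."""

        def __set__(self, obj, value):
            if not value:
                self.instances[obj] = None
            elif self.regex and getattr(obj, self.regex, False):
                self.instances[obj] = re.compile(value)
            else:
                self.instances[obj] = frozenset(getattr(obj.policy, self.lookup)(v) for v in value)

One consequence visible in the queries: the *_regex flag is read at assignment time, so it must be set before the criteria attribute when configuring a query attribute-by-attribute (the keyword-argument constructor presumably handles that ordering).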

View File

@ -1,126 +0,0 @@
# Copyright 2014-2015, Tresys Technology, LLC
#
# This file is part of SETools.
#
# SETools is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1 of
# the License, or (at your option) any later version.
#
# SETools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SETools. If not, see
# <http://www.gnu.org/licenses/>.
#
# pylint: disable=no-member,attribute-defined-outside-init,abstract-method
import re
from . import mixins
from .query import PolicyQuery
class RuleQuery(mixins.MatchObjClass, PolicyQuery):
"""Abstract base class for rule queries."""
@staticmethod
def _match_indirect_regex(obj, criteria, indirect, regex):
"""
Match the object with optional regular expression and indirection.
Parameters:
obj The object to match.
criteria The criteria to match.
regex If regular expression matching should be used.
indirect If object indirection should be used, e.g.
expanding an attribute.
"""
if indirect:
return PolicyQuery._match_in_set(
(obj.expand()),
criteria,
regex)
else:
return PolicyQuery._match_regex(
obj,
criteria,
regex)
def set_ruletype(self, ruletype):
raise NotImplementedError
def set_source(self, source, **opts):
"""
Set the criteria for the rule's source.
Parameter:
source Name to match the rule's source.
Keyword Options:
indirect If true, members of an attribute will be
matched rather than the attribute itself.
regex If true, regular expression matching will
be used. Obeys the indirect option.
Exceptions:
NameError Invalid keyword option.
"""
self.source = source
for k in list(opts.keys()):
if k == "indirect":
self.source_indirect = opts[k]
elif k == "regex":
self.source_regex = opts[k]
else:
raise NameError("Invalid source option: {0}".format(k))
if not self.source:
self.source_cmp = None
elif self.source_regex:
self.source_cmp = re.compile(self.source)
else:
self.source_cmp = self.policy.lookup_type_or_attr(self.source)
def set_target(self, target, **opts):
"""
Set the criteria for the rule's target.
Parameter:
target Name to match the rule's target.
Keyword Options:
indirect If true, members of an attribute will be
matched rather than the attribute itself.
regex If true, regular expression matching will
be used. Obeys the indirect option.
Exceptions:
NameError Invalid keyword option.
"""
self.target = target
for k in list(opts.keys()):
if k == "indirect":
self.target_indirect = opts[k]
elif k == "regex":
self.target_regex = opts[k]
else:
raise NameError("Invalid target option: {0}".format(k))
if not self.target:
self.target_cmp = None
elif self.target_regex:
self.target_cmp = re.compile(self.target)
else:
self.target_cmp = self.policy.lookup_type_or_attr(self.target)
def set_default(self, default, **opts):
raise NotImplementedError

View File

@ -20,49 +20,47 @@ import logging
from . import compquery from . import compquery
from . import mixins from . import mixins
from .descriptors import CriteriaDescriptor
class SensitivityQuery(mixins.MatchAlias, compquery.ComponentQuery): class SensitivityQuery(mixins.MatchAlias, compquery.ComponentQuery):
"""Query MLS Sensitivities""" """
Query MLS Sensitivities
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
alias=None, alias_regex=False,
sens=None, sens_dom=False, sens_domby=False):
"""
Parameters:
name The name of the category to match.
name_regex If true, regular expression matching will
be used for matching the name.
alias The alias name to match.
alias_regex If true, regular expression matching
will be used on the alias names.
sens The criteria to match the sensitivity by dominance.
sens_dom If true, the criteria will match if it dominates
the sensitivity.
sens_domby If true, the criteria will match if it is dominated
by the sensitivity.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The name of the category to match.
self.set_alias(alias, regex=alias_regex) name_regex If true, regular expression matching will
self.set_sensitivity(sens, dom=sens_dom, domby=sens_domby) be used for matching the name.
alias The alias name to match.
alias_regex If true, regular expression matching
will be used on the alias names.
sens The criteria to match the sensitivity by dominance.
sens_dom If true, the criteria will match if it dominates
the sensitivity.
sens_domby If true, the criteria will match if it is dominated
by the sensitivity.
"""
sens = CriteriaDescriptor(lookup_function="lookup_sensitivity")
sens_dom = False
sens_domby = False
def results(self): def results(self):
"""Generator which yields all matching sensitivities.""" """Generator which yields all matching sensitivities."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Alias: {0.alias_cmp}, regex: {0.alias_regex}".format(self)) self.log.debug("Alias: {0.alias}, regex: {0.alias_regex}".format(self))
self.log.debug("Sens: {0.sens!r}, dom: {0.sens_dom}, domby: {0.sens_domby}".format(self)) self.log.debug("Sens: {0.sens!r}, dom: {0.sens_dom}, domby: {0.sens_domby}".format(self))
for s in self.policy.sensitivities(): for s in self.policy.sensitivities():
if self.name and not self._match_name(s): if not self._match_name(s):
continue continue
if self.alias and not self._match_alias(s.aliases()): if not self._match_alias(s):
continue continue
if self.sens and not self._match_level( if self.sens and not self._match_level(
@ -74,33 +72,3 @@ class SensitivityQuery(mixins.MatchAlias, compquery.ComponentQuery):
continue continue
yield s yield s
def set_sensitivity(self, sens, **opts):
"""
Set the criteria for matching the sensitivity by dominance.
Parameter:
sens Criteria to match the sensitivity.
Keyword Parameters:
dom If true, the criteria will match if it
dominates the sensitivity.
domby If true, the criteria will match if it
is dominated by the sensitivity.
Exceptions:
NameError Invalid keyword option.
"""
if sens:
self.sens = self.policy.lookup_sensitivity(sens)
else:
self.sens = None
for k in list(opts.keys()):
if k == "dom":
self.sens_dom = opts[k]
elif k == "domby":
self.sens_domby = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
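For illustration, a dominance query against the new attribute-based interface (the sensitivity name is invented; p is a loaded policy):

    q = setools.SensitivityQuery(p)
    q.sens = "s1"        # resolved through lookup_sensitivity by the descriptor
    q.sens_dom = True    # match sensitivities that the criteria dominates
    for s in q.results():
        print(s)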

View File

@ -19,75 +19,89 @@
import logging import logging
import re import re
from . import mixins, query
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor, RuletypeDescriptor
from .policyrep.exception import RuleUseError, RuleNotConditional from .policyrep.exception import RuleUseError, RuleNotConditional
from . import mixins
from . import rulequery
class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery): class TERuleQuery(mixins.MatchObjClass, mixins.MatchPermission, query.PolicyQuery):
"""Query the Type Enforcement rules.""" """
Query the Type Enforcement rules.
def __init__(self, policy, Parameter:
ruletype=None, policy The policy to query.
source=None, source_regex=False, source_indirect=True,
target=None, target_regex=False, target_indirect=True,
tclass=None, tclass_regex=False,
perms=None, perms_equal=False,
default=None, default_regex=False,
boolean=None, boolean_regex=False, boolean_equal=False):
"""
Parameter:
policy The policy to query.
ruletype The rule type(s) to match.
source The name of the source type/attribute to match.
source_indirect If true, members of an attribute will be
matched rather than the attribute itself.
source_regex If true, regular expression matching will
be used on the source type/attribute.
Obeys the source_indirect option.
target The name of the target type/attribute to match.
target_indirect If true, members of an attribute will be
matched rather than the attribute itself.
target_regex If true, regular expression matching will
be used on the target type/attribute.
Obeys target_indirect option.
tclass The object class(es) to match.
tclass_regex If true, use a regular expression for
matching the rule's object class.
perms The permission(s) to match.
perms_equal If true, the permission set of the rule
must exactly match the permissions
criteria. If false, any set intersection
will match.
default The name of the default type to match.
default_regex If true, regular expression matching will be
used on the default type.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
ruletype The list of rule type(s) to match.
source The name of the source type/attribute to match.
source_indirect If true, members of an attribute will be
matched rather than the attribute itself.
Default is true.
source_regex If true, regular expression matching will
be used on the source type/attribute.
Obeys the source_indirect option.
Default is false.
target The name of the target type/attribute to match.
target_indirect If true, members of an attribute will be
matched rather than the attribute itself.
Default is true.
target_regex If true, regular expression matching will
be used on the target type/attribute.
Obeys target_indirect option.
Default is false.
tclass The object class(es) to match.
tclass_regex If true, use a regular expression for
matching the rule's object class.
Default is false.
perms The set of permission(s) to match.
perms_equal If true, the permission set of the rule
must exactly match the permissions
criteria. If false, any set intersection
will match.
Default is false.
perms_regex If true, regular expression matching will be used
on the permission names instead of set logic.
default The name of the default type to match.
default_regex If true, regular expression matching will be
used on the default type.
Default is false.
boolean The set of boolean(s) to match.
boolean_regex If true, regular expression matching will be
used on the booleans.
Default is false.
boolean_equal If true, the booleans in the conditional
expression of the rule must exactly match the
criteria. If false, any set intersection
will match. Default is false.
"""
self.set_ruletype(ruletype) ruletype = RuletypeDescriptor("validate_te_ruletype")
self.set_source(source, indirect=source_indirect, regex=source_regex) source = CriteriaDescriptor("source_regex", "lookup_type_or_attr")
self.set_target(target, indirect=target_indirect, regex=target_regex) source_regex = False
self.set_tclass(tclass, regex=tclass_regex) source_indirect = True
self.set_perms(perms, equal=perms_equal) target = CriteriaDescriptor("target_regex", "lookup_type_or_attr")
self.set_default(default, regex=default_regex) target_regex = False
self.set_boolean(boolean, regex=boolean_regex, equal=boolean_equal) target_indirect = True
default = CriteriaDescriptor("default_regex", "lookup_type")
default_regex = False
boolean = CriteriaSetDescriptor("boolean_regex", "lookup_boolean")
boolean_regex = False
boolean_equal = False
def results(self): def results(self):
"""Generator which yields all matching TE rules.""" """Generator which yields all matching TE rules."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Ruletypes: {0.ruletype}".format(self)) self.log.debug("Ruletypes: {0.ruletype}".format(self))
self.log.debug("Source: {0.source_cmp!r}, indirect: {0.source_indirect}, " self.log.debug("Source: {0.source!r}, indirect: {0.source_indirect}, "
"regex: {0.source_regex}".format(self)) "regex: {0.source_regex}".format(self))
self.log.debug("Target: {0.target_cmp!r}, indirect: {0.target_indirect}, " self.log.debug("Target: {0.target!r}, indirect: {0.target_indirect}, "
"regex: {0.target_regex}".format(self)) "regex: {0.target_regex}".format(self))
self.log.debug("Class: {0.tclass_cmp!r}, regex: {0.tclass_regex}".format(self)) self.log.debug("Class: {0.tclass!r}, regex: {0.tclass_regex}".format(self))
self.log.debug("Perms: {0.perms_cmp}, eq: {0.perms_equal}".format(self)) self.log.debug("Perms: {0.perms!r}, regex: {0.perms_regex}, eq: {0.perms_equal}".
self.log.debug("Default: {0.default_cmp!r}, regex: {0.default_regex}".format(self)) format(self))
self.log.debug("Boolean: {0.boolean_cmp!r}, eq: {0.boolean_equal}, " self.log.debug("Default: {0.default!r}, regex: {0.default_regex}".format(self))
self.log.debug("Boolean: {0.boolean!r}, eq: {0.boolean_equal}, "
"regex: {0.boolean_regex}".format(self)) "regex: {0.boolean_regex}".format(self))
for rule in self.policy.terules(): for rule in self.policy.terules():
@ -103,7 +117,7 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
# #
if self.source and not self._match_indirect_regex( if self.source and not self._match_indirect_regex(
rule.source, rule.source,
self.source_cmp, self.source,
self.source_indirect, self.source_indirect,
self.source_regex): self.source_regex):
continue continue
@ -113,7 +127,7 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
# #
if self.target and not self._match_indirect_regex( if self.target and not self._match_indirect_regex(
rule.target, rule.target,
self.target_cmp, self.target,
self.target_indirect, self.target_indirect,
self.target_regex): self.target_regex):
continue continue
@ -121,18 +135,17 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
# #
# Matching on object class # Matching on object class
# #
if self.tclass and not self._match_object_class(rule.tclass): if not self._match_object_class(rule):
continue continue
# #
# Matching on permission set # Matching on permission set
# #
if self.perms: try:
try: if not self._match_perms(rule):
if not self._match_perms(rule.perms):
continue
except RuleUseError:
continue continue
except RuleUseError:
continue
# #
# Matching on default type # Matching on default type
@ -141,7 +154,7 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
try: try:
if not self._match_regex( if not self._match_regex(
rule.default, rule.default,
self.default_cmp, self.default,
self.default_regex): self.default_regex):
continue continue
except RuleUseError: except RuleUseError:
@ -154,7 +167,7 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
try: try:
if not self._match_regex_or_set( if not self._match_regex_or_set(
rule.conditional.booleans, rule.conditional.booleans,
self.boolean_cmp, self.boolean,
self.boolean_equal, self.boolean_equal,
self.boolean_regex): self.boolean_regex):
continue continue
@ -163,76 +176,3 @@ class TERuleQuery(mixins.MatchPermission, rulequery.RuleQuery):
# if we get here, we have matched all available criteria # if we get here, we have matched all available criteria
yield rule yield rule
def set_boolean(self, boolean, **opts):
"""
Set the Boolean for the TE rule query.
Parameter:
boolean The Boolean names to match in the TE rule
conditional expression.
Options:
regex If true, regular expression matching will be used.
Exceptions:
NameError Invalid keyword option.
"""
self.boolean = boolean
for k in list(opts.keys()):
if k == "regex":
self.boolean_regex = opts[k]
elif k == "equal":
self.boolean_equal = opts[k]
else:
raise NameError("Invalid permission set option: {0}".format(k))
if not self.boolean:
self.boolean_cmp = None
elif self.boolean_regex:
self.boolean_cmp = re.compile(self.boolean)
else:
self.boolean_cmp = set(self.policy.lookup_boolean(b) for b in self.boolean)
def set_ruletype(self, ruletype):
"""
Set the rule types for the rule query.
Parameter:
ruletype The rule types to match.
"""
if ruletype:
self.policy.validate_te_ruletype(ruletype)
self.ruletype = ruletype
def set_default(self, default, **opts):
"""
Set the criteria for the rule's default type.
Parameter:
default Name to match the rule's default type.
Keyword Options:
regex If true, regular expression matching will be used.
Exceptions:
NameError Invalid keyword option.
"""
self.default = default
for k in list(opts.keys()):
if k == "regex":
self.default_regex = opts[k]
else:
raise NameError("Invalid default option: {0}".format(k))
if not self.default:
self.default_cmp = None
elif self.default_regex:
self.default_cmp = re.compile(self.default)
else:
self.default_cmp = self.policy.lookup_type(self.default)
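A sketch of equivalent TE rule query usage after the refactor; the type, class, permission, and boolean names are invented and p is a loaded policy:

    q = setools.TERuleQuery(p,
                            ruletype=["allow"],
                            source="init_t",
                            target="etc_t",
                            tclass=["file"],
                            perms=["read", "getattr"])
    for rule in q.results():
        print(rule)

    # Criteria can also be adjusted afterwards; assignment revalidates them.
    q.boolean = ["secure_mode"]
    q.boolean_equal = True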

View File

@ -20,88 +20,51 @@ import logging
import re import re
from . import compquery from . import compquery
from .descriptors import CriteriaSetDescriptor
class TypeAttributeQuery(compquery.ComponentQuery): class TypeAttributeQuery(compquery.ComponentQuery):
"""Query SELinux policy type attributes.""" """
Query SELinux policy type attributes.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
types=None, types_equal=False, types_regex=False):
"""
Parameter:
policy The policy to query.
name The type name to match.
name_regex If true, regular expression matching
will be used on the type names.
types The type to match.
types_equal If true, only attributes with type sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
types_regex If true, regular expression matching
will be used on the type names instead
of set logic.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The type name to match.
self.set_types(types, regex=types_regex, equal=types_equal) name_regex If true, regular expression matching
will be used on the type names.
types The type to match.
types_equal If true, only attributes with type sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
types_regex If true, regular expression matching
will be used on the type names instead
of set logic.
"""
types = CriteriaSetDescriptor("types_regex", "lookup_type")
types_equal = False
types_regex = False
def results(self): def results(self):
"""Generator which yields all matching types.""" """Generator which yields all matching types."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Types: {0.types_cmp!r}, regex: {0.types_regex}, " self.log.debug("Types: {0.types!r}, regex: {0.types_regex}, "
"eq: {0.types_equal}".format(self)) "eq: {0.types_equal}".format(self))
for attr in self.policy.typeattributes(): for attr in self.policy.typeattributes():
if self.name and not self._match_name(attr): if not self._match_name(attr):
continue continue
if self.types and not self._match_regex_or_set( if self.types and not self._match_regex_or_set(
set(attr.expand()), set(attr.expand()),
self.types_cmp, self.types,
self.types_equal, self.types_equal,
self.types_regex): self.types_regex):
continue continue
yield attr yield attr
def set_types(self, types, **opts):
"""
Set the criteria for the attribute's types.
Parameter:
types Name to match the attribute's types.
Keyword Options:
regex If true, regular expression matching will be used
instead of set logic.
equal If true, the type set of the attribute
must equal the type criteria to
match. If false, any intersection in the
criteria will cause a rule match.
Exceptions:
NameError Invalid keyword option.
"""
self.types = types
for k in list(opts.keys()):
if k == "regex":
self.types_regex = opts[k]
elif k == "equal":
self.types_equal = opts[k]
else:
raise NameError("Invalid types option: {0}".format(k))
if not self.types:
self.types_cmp = None
elif self.types_regex:
self.types_cmp = re.compile(self.types)
else:
self.types_cmp = set(self.policy.lookup_type(t) for t in self.types)
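For illustration, the regular-expression path through the same descriptor: with types_regex enabled, the assignment stores a compiled pattern instead of a set of resolved types (the pattern is invented; p is a loaded policy):

    q = setools.TypeAttributeQuery(p)
    q.types_regex = True    # set the flag first so the descriptor compiles a pattern
    q.types = "^httpd_"
    for attr in q.results():
        print(attr)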

View File

@ -21,126 +21,76 @@ import re
from . import compquery from . import compquery
from . import mixins from . import mixins
from .descriptors import CriteriaSetDescriptor
class TypeQuery(mixins.MatchAlias, compquery.ComponentQuery): class TypeQuery(mixins.MatchAlias, compquery.ComponentQuery):
"""Query SELinux policy types.""" """
Query SELinux policy types.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
alias=None, alias_regex=False,
attrs=None, attrs_equal=False, attrs_regex=False,
permissive=False, match_permissive=False):
"""
Parameter:
policy The policy to query.
name The type name to match.
name_regex If true, regular expression matching
will be used on the type names.
alias The alias name to match.
alias_regex If true, regular expression matching
will be used on the alias names.
attrs The attribute to match.
attrs_equal If true, only types with attribute sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
attrs_regex If true, regular expression matching
will be used on the attribute names instead
of set logic.
match_permissive If true, the permissive state will be matched.
permissive The permissive state to match.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The type name to match.
self.set_alias(alias, regex=alias_regex) name_regex If true, regular expression matching
self.set_attrs(attrs, regex=attrs_regex, equal=attrs_equal) will be used on the type names.
self.set_permissive(match_permissive, permissive=permissive) alias The alias name to match.
alias_regex If true, regular expression matching
will be used on the alias names.
attrs The attribute to match.
attrs_equal If true, only types with attribute sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
attrs_regex If true, regular expression matching
will be used on the attribute names instead
of set logic.
permissive The permissive state to match. If this
is None, the state is not matched.
"""
attrs = CriteriaSetDescriptor("attrs_regex", "lookup_typeattr")
attrs_regex = False
attrs_equal = False
_permissive = None
@property
def permissive(self):
return self._permissive
@permissive.setter
def permissive(self, value):
if value is None:
self._permissive = None
else:
self._permissive = bool(value)
def results(self): def results(self):
"""Generator which yields all matching types.""" """Generator which yields all matching types."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Alias: {0.alias_cmp}, regex: {0.alias_regex}".format(self)) self.log.debug("Alias: {0.alias}, regex: {0.alias_regex}".format(self))
self.log.debug("Attrs: {0.attrs_cmp!r}, regex: {0.attrs_regex}, " self.log.debug("Attrs: {0.attrs!r}, regex: {0.attrs_regex}, "
"eq: {0.attrs_equal}".format(self)) "eq: {0.attrs_equal}".format(self))
self.log.debug("Permissive: {0.match_permissive}, state: {0.permissive}".format(self)) self.log.debug("Permissive: {0.permissive}".format(self))
for t in self.policy.types(): for t in self.policy.types():
if self.name and not self._match_name(t): if not self._match_name(t):
continue continue
if self.alias and not self._match_alias(t.aliases()): if not self._match_alias(t):
continue continue
if self.attrs and not self._match_regex_or_set( if self.attrs and not self._match_regex_or_set(
set(t.attributes()), set(t.attributes()),
self.attrs_cmp, self.attrs,
self.attrs_equal, self.attrs_equal,
self.attrs_regex): self.attrs_regex):
continue continue
if self.match_permissive and t.ispermissive != self.permissive: if self.permissive is not None and t.ispermissive != self.permissive:
continue continue
yield t yield t
def set_attrs(self, attrs, **opts):
"""
Set the criteria for the type's attributes.
Parameter:
attrs Name to match the type's attributes.
Keyword Options:
regex If true, regular expression matching will be used
instead of set logic.
equal If true, the attribute set of the type
must equal the attributes criteria to
match. If false, any intersection in the
criteria will cause a rule match.
Exceptions:
NameError Invalid keyword option.
"""
self.attrs = attrs
for k in list(opts.keys()):
if k == "regex":
self.attrs_regex = opts[k]
elif k == "equal":
self.attrs_equal = opts[k]
else:
raise NameError("Invalid alias option: {0}".format(k))
if not self.attrs:
self.attrs_cmp = None
elif self.attrs_regex:
self.attrs_cmp = re.compile(self.attrs)
else:
self.attrs_cmp = set(self.policy.lookup_typeattr(a) for a in self.attrs)
def set_permissive(self, match, **opts):
"""
Set if the permissive state should be matched.
Parameter:
match If true, the permissive state will be matched.
permissive If true, permissive types will match, otherwise
enforcing types will match.
Exceptions:
NameError Invalid keyword option.
"""
self.match_permissive = bool(match)
for k in list(opts.keys()):
if k == "permissive":
self.permissive = bool(opts[k])
else:
raise NameError("Invalid permissive option: {0}".format(k))

View File

@ -20,63 +20,65 @@ import logging
import re import re
from . import compquery from . import compquery
from .descriptors import CriteriaDescriptor, CriteriaSetDescriptor
class UserQuery(compquery.ComponentQuery): class UserQuery(compquery.ComponentQuery):
"""Query SELinux policy users.""" """
Query SELinux policy users.
def __init__(self, policy, Parameter:
name=None, name_regex=False, policy The policy to query.
roles=None, roles_equal=False, roles_regex=False,
level=None, level_dom=False, level_domby=False, level_incomp=False,
range_=None, range_overlap=False, range_subset=False,
range_superset=False, range_proper=False):
"""
Parameter:
policy The policy to query.
name The user name to match.
name_regex If true, regular expression matching
will be used on the user names.
roles The attribute to match.
roles_equal If true, only types with role sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
roles_regex If true, regular expression matching
will be used on the role names instead
of set logic.
level The criteria to match the user's default level.
level_dom If true, the criteria will match if it dominates
the user's default level.
level_domby If true, the criteria will match if it is dominated
by the user's default level.
level_incomp If true, the criteria will match if it is incomparable
to the user's default level.
range_ The criteria to match the user's range.
range_subset If true, the criteria will match if it is a subset
of the user's range.
range_overlap If true, the criteria will match if it overlaps
any of the user's range.
range_superset If true, the criteria will match if it is a superset
of the user's range.
range_proper If true, use proper superset/subset operations.
No effect if not using set operations.
"""
self.log = logging.getLogger(self.__class__.__name__)
self.policy = policy Keyword Parameters/Class attributes:
self.set_name(name, regex=name_regex) name The user name to match.
self.set_roles(roles, regex=roles_regex, equal=roles_equal) name_regex If true, regular expression matching
self.set_level(level, dom=level_dom, domby=level_domby, incomp=level_incomp) will be used on the user names.
self.set_range(range_, overlap=range_overlap, subset=range_subset, roles The attribute to match.
superset=range_superset, proper=range_proper) roles_equal If true, only types with role sets
that are equal to the criteria will
match. Otherwise, any intersection
will match.
roles_regex If true, regular expression matching
will be used on the role names instead
of set logic.
level The criteria to match the user's default level.
level_dom If true, the criteria will match if it dominates
the user's default level.
level_domby If true, the criteria will match if it is dominated
by the user's default level.
level_incomp If true, the criteria will match if it is incomparable
to the user's default level.
range_ The criteria to match the user's range.
range_subset If true, the criteria will match if it is a subset
of the user's range.
range_overlap If true, the criteria will match if it overlaps
any of the user's range.
range_superset If true, the criteria will match if it is a superset
of the user's range.
range_proper If true, use proper superset/subset operations.
No effect if not using set operations.
"""
level = CriteriaDescriptor(lookup_function="lookup_level")
level_dom = False
level_domby = False
level_incomp = False
range_ = CriteriaDescriptor(lookup_function="lookup_range")
range_overlap = False
range_subset = False
range_superset = False
range_proper = False
roles = CriteriaSetDescriptor("roles_regex", "lookup_role")
roles_equal = False
roles_regex = False
def results(self): def results(self):
"""Generator which yields all matching users.""" """Generator which yields all matching users."""
self.log.info("Generating results from {0.policy}".format(self)) self.log.info("Generating results from {0.policy}".format(self))
self.log.debug("Name: {0.name_cmp!r}, regex: {0.name_regex}".format(self)) self.log.debug("Name: {0.name!r}, regex: {0.name_regex}".format(self))
self.log.debug("Roles: {0.roles_cmp!r}, regex: {0.roles_regex}, " self.log.debug("Roles: {0.roles!r}, regex: {0.roles_regex}, "
"eq: {0.roles_equal}".format(self)) "eq: {0.roles_equal}".format(self))
self.log.debug("Level: {0.level!r}, dom: {0.level_dom}, domby: {0.level_domby}, " self.log.debug("Level: {0.level!r}, dom: {0.level_dom}, domby: {0.level_domby}, "
"incomp: {0.level_incomp}".format(self)) "incomp: {0.level_incomp}".format(self))
@ -84,15 +86,12 @@ class UserQuery(compquery.ComponentQuery):
"superset: {0.range_superset}, proper: {0.range_proper}".format(self)) "superset: {0.range_superset}, proper: {0.range_proper}".format(self))
for user in self.policy.users(): for user in self.policy.users():
if self.name and not self._match_regex( if not self._match_name(user):
user,
self.name_cmp,
self.name_regex):
continue continue
if self.roles and not self._match_regex_or_set( if self.roles and not self._match_regex_or_set(
user.roles, user.roles,
self.roles_cmp, self.roles,
self.roles_equal, self.roles_equal,
self.roles_regex): self.roles_regex):
continue continue
@ -115,108 +114,3 @@ class UserQuery(compquery.ComponentQuery):
continue continue
yield user yield user
def set_level(self, level, **opts):
"""
Set the criteria for matching the user's default level.
Parameter:
level Criteria to match the user's default level.
Keyword Parameters:
dom If true, the criteria will match if it dominates the user's default level.
domby If true, the criteria will match if it is dominated by the user's default level.
incomp If true, the criteria will match if it is incomparable to the user's default level.
Exceptions:
NameError Invalid keyword option.
"""
if level:
self.level = self.policy.lookup_level(level)
else:
self.level = None
for k in list(opts.keys()):
if k == "dom":
self.level_dom = opts[k]
elif k == "domby":
self.level_domby = opts[k]
elif k == "incomp":
self.level_incomp = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
def set_range(self, range_, **opts):
"""
Set the criteria for matching the user's range.
Parameter:
range_ Criteria to match the user's range.
Keyword Parameters:
subset If true, the criteria will match if it is a subset
of the user's range.
overlap If true, the criteria will match if it overlaps
any of the user's range.
superset If true, the criteria will match if it is a superset
of the user's range.
proper If true, use proper superset/subset operations.
No effect if not using set operations.
Exceptions:
NameError Invalid keyword option.
"""
if range_:
self.range_ = self.policy.lookup_range(range_)
else:
self.range_ = None
for k in list(opts.keys()):
if k == "subset":
self.range_subset = opts[k]
elif k == "overlap":
self.range_overlap = opts[k]
elif k == "superset":
self.range_superset = opts[k]
elif k == "proper":
self.range_proper = opts[k]
else:
raise NameError("Invalid name option: {0}".format(k))
def set_roles(self, roles, **opts):
"""
Set the criteria for the users's roles.
Parameter:
roles Name to match the user's roles.
Keyword Options:
regex If true, regular expression matching will be used
instead of set logic.
equal If true, the role set of the user
must equal the roles criteria to
match. If false, any intersection in the
criteria will cause a rule match.
Exceptions:
NameError Invalid keyword option.
"""
self.roles = roles
for k in list(opts.keys()):
if k == "regex":
self.roles_regex = opts[k]
elif k == "equal":
self.roles_equal = opts[k]
else:
raise NameError("Invalid roles option: {0}".format(k))
if not self.roles:
self.roles_cmp = None
elif self.roles_regex:
self.roles_cmp = re.compile(self.roles)
else:
self.roles_cmp = set(self.policy.lookup_role(r) for r in self.roles)
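A sketch combining role, level, and range criteria in the new style; the role name, level, and range strings are invented and p is a loaded policy:

    q = setools.UserQuery(p, roles=["staff_r"])
    q.level = "s0"                  # resolved through lookup_level
    q.level_dom = True
    q.range_ = "s0 - s0:c0.c127"    # resolved through lookup_range
    q.range_subset = True
    for user in q.results():
        print(user)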

View File

@ -52,7 +52,7 @@ class BoolQueryTest(unittest.TestCase):
def test_010_default(self): def test_010_default(self):
"""Boolean query with default state match.""" """Boolean query with default state match."""
q = BoolQuery(self.p, match_default=True, default=False) q = BoolQuery(self.p, default=False)
bools = sorted(str(b) for b in q.results()) bools = sorted(str(b) for b in q.results())
self.assertListEqual(["test10a", "test10b"], bools) self.assertListEqual(["test10a", "test10b"], bools)
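The updated test reflects the same tri-state pattern used for TypeQuery.permissive: the separate match_default flag is gone, and passing default=False (or True) filters on the default state, presumably with an unset (None) value skipping the check. For illustration, outside the test fixture:

    q = BoolQuery(p, default=False)                 # booleans that default to false
    disabled = sorted(str(b) for b in q.results())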

View File

@ -42,6 +42,7 @@ class ConstraintQueryTest(unittest.TestCase):
constraint = sorted(c.tclass for c in q.results()) constraint = sorted(c.tclass for c in q.results())
self.assertListEqual(["test1"], constraint) self.assertListEqual(["test1"], constraint)
@unittest.skip("Setting tclass to a string is no longer supported.")
def test_010_class_exact(self): def test_010_class_exact(self):
"""Constraint query with exact object class match.""" """Constraint query with exact object class match."""
q = ConstraintQuery(self.p, tclass="test10") q = ConstraintQuery(self.p, tclass="test10")

View File

@ -347,7 +347,7 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(False) self.a.reverse = False
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -375,7 +375,7 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(True) self.a.reverse = True
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -399,8 +399,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans1"]) self.a.exclude = ["trans1"]
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -421,8 +421,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans3_exec1"]) self.a.exclude = ["trans3_exec1"]
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -446,8 +446,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["bothtrans200_exec"]) self.a.exclude = ["bothtrans200_exec"]
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -471,8 +471,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
# Don't check node list since the disconnected nodes are not # Don't check node list since the disconnected nodes are not
# removed after removing invalid domain transitions # removed after removing invalid domain transitions
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans2_exec"]) self.a.exclude = ["trans2_exec"]
self.a._build_subgraph() self.a._build_subgraph()
start = self.p.lookup_type("start") start = self.p.lookup_type("start")
@ -492,8 +492,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_300_all_paths(self): def test_300_all_paths(self):
"""DTA: all paths output""" """DTA: all paths output"""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["start", "dyntrans100", "bothtrans200"] expected_path = ["start", "dyntrans100", "bothtrans200"]
@ -533,8 +533,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_301_all_shortest_paths(self): def test_301_all_shortest_paths(self):
"""DTA: all shortest paths output""" """DTA: all shortest paths output"""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["start", "dyntrans100", "bothtrans200"] expected_path = ["start", "dyntrans100", "bothtrans200"]
@ -574,8 +574,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_302_shortest_path(self): def test_302_shortest_path(self):
"""DTA: shortest path output""" """DTA: shortest path output"""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["start", "dyntrans100", "bothtrans200"] expected_path = ["start", "dyntrans100", "bothtrans200"]
@ -615,8 +615,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_303_transitions(self): def test_303_transitions(self):
"""DTA: transitions output""" """DTA: transitions output"""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
transitions = list(self.a.transitions("start")) transitions = list(self.a.transitions("start"))
self.assertEqual(2, len(transitions)) self.assertEqual(2, len(transitions))
@ -652,8 +652,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_310_all_paths_reversed(self): def test_310_all_paths_reversed(self):
"""DTA: all paths output reverse DTA""" """DTA: all paths output reverse DTA"""
self.a.set_reverse(True) self.a.reverse = True
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["bothtrans200", "dyntrans100", "start"] expected_path = ["bothtrans200", "dyntrans100", "start"]
@ -693,8 +693,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_311_all_shortest_paths_reversed(self): def test_311_all_shortest_paths_reversed(self):
"""DTA: all shortest paths output reverse DTA""" """DTA: all shortest paths output reverse DTA"""
self.a.set_reverse(True) self.a.reverse = True
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["bothtrans200", "dyntrans100", "start"] expected_path = ["bothtrans200", "dyntrans100", "start"]
@ -734,8 +734,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_312_shortest_path_reversed(self): def test_312_shortest_path_reversed(self):
"""DTA: shortest path output reverse DTA""" """DTA: shortest path output reverse DTA"""
self.a.set_reverse(True) self.a.reverse = True
self.a.set_exclude(None) self.a.exclude = None
expected_path = ["bothtrans200", "dyntrans100", "start"] expected_path = ["bothtrans200", "dyntrans100", "start"]
@ -775,8 +775,8 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_313_transitions_reversed(self): def test_313_transitions_reversed(self):
"""DTA: transitions output reverse DTA""" """DTA: transitions output reverse DTA"""
self.a.set_reverse(True) self.a.reverse = True
self.a.set_exclude(None) self.a.exclude = None
transitions = list(self.a.transitions("bothtrans200")) transitions = list(self.a.transitions("bothtrans200"))
self.assertEqual(1, len(transitions)) self.assertEqual(1, len(transitions))
@ -812,160 +812,161 @@ class DomainTransitionAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_900_set_exclude_invalid_type(self): def test_900_set_exclude_invalid_type(self):
"""DTA: set invalid excluded type.""" """DTA: set invalid excluded type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
self.assertRaises(InvalidType, self.a.set_exclude, ["trans1", "invalid_type"]) with self.assertRaises(InvalidType):
self.a.exclude = ["trans1", "invalid_type"]
def test_910_all_paths_invalid_source(self): def test_910_all_paths_invalid_source(self):
"""DTA: all paths with invalid source type.""" """DTA: all paths with invalid source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_paths("invalid_type", "trans1")) list(self.a.all_paths("invalid_type", "trans1"))
def test_911_all_paths_invalid_target(self): def test_911_all_paths_invalid_target(self):
"""DTA: all paths with invalid target type.""" """DTA: all paths with invalid target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_paths("trans1", "invalid_type")) list(self.a.all_paths("trans1", "invalid_type"))
def test_912_all_paths_invalid_maxlen(self): def test_912_all_paths_invalid_maxlen(self):
"""DTA: all paths with invalid max path length.""" """DTA: all paths with invalid max path length."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
list(self.a.all_paths("trans1", "trans2", maxlen=-2)) list(self.a.all_paths("trans1", "trans2", maxlen=-2))
def test_913_all_paths_source_excluded(self): def test_913_all_paths_source_excluded(self):
"""DTA: all paths with excluded source type.""" """DTA: all paths with excluded source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans1"]) self.a.exclude = ["trans1"]
paths = list(self.a.all_paths("trans1", "trans2")) paths = list(self.a.all_paths("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_914_all_paths_target_excluded(self): def test_914_all_paths_target_excluded(self):
"""DTA: all paths with excluded target type.""" """DTA: all paths with excluded target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans2"]) self.a.exclude = ["trans2"]
paths = list(self.a.all_paths("trans1", "trans2")) paths = list(self.a.all_paths("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_915_all_paths_source_disconnected(self): def test_915_all_paths_source_disconnected(self):
"""DTA: all paths with disconnected source type.""" """DTA: all paths with disconnected source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
paths = list(self.a.all_paths("trans5", "trans2")) paths = list(self.a.all_paths("trans5", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_916_all_paths_target_disconnected(self): def test_916_all_paths_target_disconnected(self):
"""DTA: all paths with disconnected target type.""" """DTA: all paths with disconnected target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans3"]) self.a.exclude = ["trans3"]
paths = list(self.a.all_paths("trans2", "trans5")) paths = list(self.a.all_paths("trans2", "trans5"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_920_shortest_path_invalid_source(self): def test_920_shortest_path_invalid_source(self):
"""DTA: shortest path with invalid source type.""" """DTA: shortest path with invalid source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.shortest_path("invalid_type", "trans1")) list(self.a.shortest_path("invalid_type", "trans1"))
def test_921_shortest_path_invalid_target(self): def test_921_shortest_path_invalid_target(self):
"""DTA: shortest path with invalid target type.""" """DTA: shortest path with invalid target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.shortest_path("trans1", "invalid_type")) list(self.a.shortest_path("trans1", "invalid_type"))
def test_922_shortest_path_source_excluded(self): def test_922_shortest_path_source_excluded(self):
"""DTA: shortest path with excluded source type.""" """DTA: shortest path with excluded source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans1"]) self.a.exclude = ["trans1"]
paths = list(self.a.shortest_path("trans1", "trans2")) paths = list(self.a.shortest_path("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_923_shortest_path_target_excluded(self): def test_923_shortest_path_target_excluded(self):
"""DTA: shortest path with excluded target type.""" """DTA: shortest path with excluded target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans2"]) self.a.exclude = ["trans2"]
paths = list(self.a.shortest_path("trans1", "trans2")) paths = list(self.a.shortest_path("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_924_shortest_path_source_disconnected(self): def test_924_shortest_path_source_disconnected(self):
"""DTA: shortest path with disconnected source type.""" """DTA: shortest path with disconnected source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
paths = list(self.a.shortest_path("trans5", "trans2")) paths = list(self.a.shortest_path("trans5", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_925_shortest_path_target_disconnected(self): def test_925_shortest_path_target_disconnected(self):
"""DTA: shortest path with disconnected target type.""" """DTA: shortest path with disconnected target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans3"]) self.a.exclude = ["trans3"]
paths = list(self.a.shortest_path("trans2", "trans5")) paths = list(self.a.shortest_path("trans2", "trans5"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_930_all_shortest_paths_invalid_source(self): def test_930_all_shortest_paths_invalid_source(self):
"""DTA: all shortest paths with invalid source type.""" """DTA: all shortest paths with invalid source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_shortest_paths("invalid_type", "trans1")) list(self.a.all_shortest_paths("invalid_type", "trans1"))
def test_931_all_shortest_paths_invalid_target(self): def test_931_all_shortest_paths_invalid_target(self):
"""DTA: all shortest paths with invalid target type.""" """DTA: all shortest paths with invalid target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_shortest_paths("trans1", "invalid_type")) list(self.a.all_shortest_paths("trans1", "invalid_type"))
def test_932_all_shortest_paths_source_excluded(self): def test_932_all_shortest_paths_source_excluded(self):
"""DTA: all shortest paths with excluded source type.""" """DTA: all shortest paths with excluded source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans1"]) self.a.exclude = ["trans1"]
paths = list(self.a.all_shortest_paths("trans1", "trans2")) paths = list(self.a.all_shortest_paths("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_933_all_shortest_paths_target_excluded(self): def test_933_all_shortest_paths_target_excluded(self):
"""DTA: all shortest paths with excluded target type.""" """DTA: all shortest paths with excluded target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans2"]) self.a.exclude = ["trans2"]
paths = list(self.a.all_shortest_paths("trans1", "trans2")) paths = list(self.a.all_shortest_paths("trans1", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_934_all_shortest_paths_source_disconnected(self): def test_934_all_shortest_paths_source_disconnected(self):
"""DTA: all shortest paths with disconnected source type.""" """DTA: all shortest paths with disconnected source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
paths = list(self.a.all_shortest_paths("trans5", "trans2")) paths = list(self.a.all_shortest_paths("trans5", "trans2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_935_all_shortest_paths_target_disconnected(self): def test_935_all_shortest_paths_target_disconnected(self):
"""DTA: all shortest paths with disconnected target type.""" """DTA: all shortest paths with disconnected target type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans3"]) self.a.exclude = ["trans3"]
paths = list(self.a.all_shortest_paths("trans2", "trans5")) paths = list(self.a.all_shortest_paths("trans2", "trans5"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_940_transitions_invalid_source(self): def test_940_transitions_invalid_source(self):
"""DTA: transitions with invalid source type.""" """DTA: transitions with invalid source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(None) self.a.exclude = None
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.transitions("invalid_type")) list(self.a.transitions("invalid_type"))
def test_941_transitions_source_excluded(self): def test_941_transitions_source_excluded(self):
"""DTA: transitions with excluded source type.""" """DTA: transitions with excluded source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans1"]) self.a.exclude = ["trans1"]
paths = list(self.a.transitions("trans1")) paths = list(self.a.transitions("trans1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_942_transitions_source_disconnected(self): def test_942_transitions_source_disconnected(self):
"""DTA: transitions with disconnected source type.""" """DTA: transitions with disconnected source type."""
self.a.set_reverse(False) self.a.reverse = False
self.a.set_exclude(["trans3"]) self.a.exclude = ["trans3"]
paths = list(self.a.transitions("trans5")) paths = list(self.a.transitions("trans5"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
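The domain transition analysis follows the same pattern: reverse and exclude are plain attributes, and (as the InvalidType test above shows) excluded type names are validated at assignment time. A rough usage sketch with invented domain names; p is a loaded policy:

    a = setools.DomainTransitionAnalysis(p)
    a.reverse = False
    a.exclude = ["container_t"]    # invalid names raise InvalidType here, not at analysis time
    paths = list(a.all_paths("init_t", "sshd_t", maxlen=3))
    transitions = list(a.transitions("init_t"))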

View File

@ -129,8 +129,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_100_minimum_3(self): def test_100_minimum_3(self):
"""Information flow analysis with minimum weight 3.""" """Information flow analysis with minimum weight 3."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(3) self.a.min_weight = 3
self.a._build_subgraph() self.a._build_subgraph()
disconnected1 = self.p.lookup_type("disconnected1") disconnected1 = self.p.lookup_type("disconnected1")
@ -166,8 +166,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_200_minimum_8(self): def test_200_minimum_8(self):
"""Information flow analysis with minimum weight 8.""" """Information flow analysis with minimum weight 8."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(8) self.a.min_weight = 8
self.a._build_subgraph() self.a._build_subgraph()
disconnected1 = self.p.lookup_type("disconnected1") disconnected1 = self.p.lookup_type("disconnected1")
@ -199,8 +199,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_300_all_paths(self): def test_300_all_paths(self):
"""Information flow analysis: all paths output""" """Information flow analysis: all paths output"""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_paths("node1", "node4", 3)) paths = list(self.a.all_paths("node1", "node4", 3))
self.assertEqual(1, len(paths)) self.assertEqual(1, len(paths))
@ -226,8 +226,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_301_all_shortest_paths(self): def test_301_all_shortest_paths(self):
"""Information flow analysis: all shortest paths output""" """Information flow analysis: all shortest paths output"""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_shortest_paths("node1", "node4")) paths = list(self.a.all_shortest_paths("node1", "node4"))
self.assertEqual(1, len(paths)) self.assertEqual(1, len(paths))
@ -253,8 +253,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_302_shortest_path(self): def test_302_shortest_path(self):
"""Information flow analysis: shortest path output""" """Information flow analysis: shortest path output"""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.shortest_path("node1", "node4")) paths = list(self.a.shortest_path("node1", "node4"))
self.assertEqual(1, len(paths)) self.assertEqual(1, len(paths))
@ -280,8 +280,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_303_infoflows_out(self): def test_303_infoflows_out(self):
"""Information flow analysis: flows out of a type""" """Information flow analysis: flows out of a type"""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
for flow in self.a.infoflows("node6"): for flow in self.a.infoflows("node6"):
self.assertIsInstance(flow.source, Type) self.assertIsInstance(flow.source, Type)
@ -292,8 +292,8 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_304_infoflows_in(self): def test_304_infoflows_in(self):
"""Information flow analysis: flows in to a type""" """Information flow analysis: flows in to a type"""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
for flow in self.a.infoflows("node8", out=False): for flow in self.a.infoflows("node8", out=False):
self.assertIsInstance(flow.source, Type) self.assertIsInstance(flow.source, Type)
@ -304,168 +304,176 @@ class InfoFlowAnalysisTest(mixins.ValidateRule, unittest.TestCase):
def test_900_set_exclude_invalid_type(self): def test_900_set_exclude_invalid_type(self):
"""Information flow analysis: set invalid excluded type.""" """Information flow analysis: set invalid excluded type."""
self.assertRaises(InvalidType, self.a.set_exclude, ["node1", "invalid_type"]) with self.assertRaises(InvalidType):
self.a.exclude = ["node1", "invalid_type"]
def test_901_set_small_min_weight(self): def test_901_set_small_min_weight(self):
"""Information flow analysis: set too small weight.""" """Information flow analysis: set too small weight."""
self.assertRaises(ValueError, self.a.set_min_weight, 0)
self.assertRaises(ValueError, self.a.set_min_weight, -3) with self.assertRaises(ValueError):
self.a.min_weight = 0
with self.assertRaises(ValueError):
self.a.min_weight = -3
def test_902_set_large_min_weight(self): def test_902_set_large_min_weight(self):
"""Information flow analysis: set too big weight.""" """Information flow analysis: set too big weight."""
self.assertRaises(ValueError, self.a.set_min_weight, 11) with self.assertRaises(ValueError):
self.assertRaises(ValueError, self.a.set_min_weight, 50) self.a.min_weight = 11
with self.assertRaises(ValueError):
self.a.min_weight = 50
def test_910_all_paths_invalid_source(self): def test_910_all_paths_invalid_source(self):
"""Information flow analysis: all paths with invalid source type.""" """Information flow analysis: all paths with invalid source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_paths("invalid_type", "node1")) list(self.a.all_paths("invalid_type", "node1"))
def test_911_all_paths_invalid_target(self): def test_911_all_paths_invalid_target(self):
"""Information flow analysis: all paths with invalid target type.""" """Information flow analysis: all paths with invalid target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_paths("node1", "invalid_type")) list(self.a.all_paths("node1", "invalid_type"))
def test_912_all_paths_invalid_maxlen(self): def test_912_all_paths_invalid_maxlen(self):
"""Information flow analysis: all paths with invalid max path length.""" """Information flow analysis: all paths with invalid max path length."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
list(self.a.all_paths("node1", "node2", maxlen=-2)) list(self.a.all_paths("node1", "node2", maxlen=-2))
def test_913_all_paths_source_excluded(self): def test_913_all_paths_source_excluded(self):
"""Information flow analysis: all paths with excluded source type.""" """Information flow analysis: all paths with excluded source type."""
self.a.set_exclude(["node1"]) self.a.exclude = ["node1"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_paths("node1", "node2")) paths = list(self.a.all_paths("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_914_all_paths_target_excluded(self): def test_914_all_paths_target_excluded(self):
"""Information flow analysis: all paths with excluded target type.""" """Information flow analysis: all paths with excluded target type."""
self.a.set_exclude(["node2"]) self.a.exclude = ["node2"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_paths("node1", "node2")) paths = list(self.a.all_paths("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_915_all_paths_source_disconnected(self): def test_915_all_paths_source_disconnected(self):
"""Information flow analysis: all paths with disconnected source type.""" """Information flow analysis: all paths with disconnected source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_paths("disconnected1", "node2")) paths = list(self.a.all_paths("disconnected1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_916_all_paths_target_disconnected(self): def test_916_all_paths_target_disconnected(self):
"""Information flow analysis: all paths with disconnected target type.""" """Information flow analysis: all paths with disconnected target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_paths("node2", "disconnected1")) paths = list(self.a.all_paths("node2", "disconnected1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_920_shortest_path_invalid_source(self): def test_920_shortest_path_invalid_source(self):
"""Information flow analysis: shortest path with invalid source type.""" """Information flow analysis: shortest path with invalid source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.shortest_path("invalid_type", "node1")) list(self.a.shortest_path("invalid_type", "node1"))
def test_921_shortest_path_invalid_target(self): def test_921_shortest_path_invalid_target(self):
"""Information flow analysis: shortest path with invalid target type.""" """Information flow analysis: shortest path with invalid target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.shortest_path("node1", "invalid_type")) list(self.a.shortest_path("node1", "invalid_type"))
def test_922_shortest_path_source_excluded(self): def test_922_shortest_path_source_excluded(self):
"""Information flow analysis: shortest path with excluded source type.""" """Information flow analysis: shortest path with excluded source type."""
self.a.set_exclude(["node1"]) self.a.exclude = ["node1"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.shortest_path("node1", "node2")) paths = list(self.a.shortest_path("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_923_shortest_path_target_excluded(self): def test_923_shortest_path_target_excluded(self):
"""Information flow analysis: shortest path with excluded target type.""" """Information flow analysis: shortest path with excluded target type."""
self.a.set_exclude(["node2"]) self.a.exclude = ["node2"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.shortest_path("node1", "node2")) paths = list(self.a.shortest_path("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_924_shortest_path_source_disconnected(self): def test_924_shortest_path_source_disconnected(self):
"""Information flow analysis: shortest path with disconnected source type.""" """Information flow analysis: shortest path with disconnected source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.shortest_path("disconnected1", "node2")) paths = list(self.a.shortest_path("disconnected1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_925_shortest_path_target_disconnected(self): def test_925_shortest_path_target_disconnected(self):
"""Information flow analysis: shortest path with disconnected target type.""" """Information flow analysis: shortest path with disconnected target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.shortest_path("node2", "disconnected1")) paths = list(self.a.shortest_path("node2", "disconnected1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_930_all_shortest_paths_invalid_source(self): def test_930_all_shortest_paths_invalid_source(self):
"""Information flow analysis: all shortest paths with invalid source type.""" """Information flow analysis: all shortest paths with invalid source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_shortest_paths("invalid_type", "node1")) list(self.a.all_shortest_paths("invalid_type", "node1"))
def test_931_all_shortest_paths_invalid_target(self): def test_931_all_shortest_paths_invalid_target(self):
"""Information flow analysis: all shortest paths with invalid target type.""" """Information flow analysis: all shortest paths with invalid target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.all_shortest_paths("node1", "invalid_type")) list(self.a.all_shortest_paths("node1", "invalid_type"))
def test_932_all_shortest_paths_source_excluded(self): def test_932_all_shortest_paths_source_excluded(self):
"""Information flow analysis: all shortest paths with excluded source type.""" """Information flow analysis: all shortest paths with excluded source type."""
self.a.set_exclude(["node1"]) self.a.exclude = ["node1"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_shortest_paths("node1", "node2")) paths = list(self.a.all_shortest_paths("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_933_all_shortest_paths_target_excluded(self): def test_933_all_shortest_paths_target_excluded(self):
"""Information flow analysis: all shortest paths with excluded target type.""" """Information flow analysis: all shortest paths with excluded target type."""
self.a.set_exclude(["node2"]) self.a.exclude = ["node2"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_shortest_paths("node1", "node2")) paths = list(self.a.all_shortest_paths("node1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_934_all_shortest_paths_source_disconnected(self): def test_934_all_shortest_paths_source_disconnected(self):
"""Information flow analysis: all shortest paths with disconnected source type.""" """Information flow analysis: all shortest paths with disconnected source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_shortest_paths("disconnected1", "node2")) paths = list(self.a.all_shortest_paths("disconnected1", "node2"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_935_all_shortest_paths_target_disconnected(self): def test_935_all_shortest_paths_target_disconnected(self):
"""Information flow analysis: all shortest paths with disconnected target type.""" """Information flow analysis: all shortest paths with disconnected target type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.all_shortest_paths("node2", "disconnected1")) paths = list(self.a.all_shortest_paths("node2", "disconnected1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_940_infoflows_invalid_source(self): def test_940_infoflows_invalid_source(self):
"""Information flow analysis: infoflows with invalid source type.""" """Information flow analysis: infoflows with invalid source type."""
self.a.set_exclude(None) self.a.exclude = None
self.a.set_min_weight(1) self.a.min_weight = 1
with self.assertRaises(InvalidType): with self.assertRaises(InvalidType):
list(self.a.infoflows("invalid_type")) list(self.a.infoflows("invalid_type"))
def test_941_infoflows_source_excluded(self): def test_941_infoflows_source_excluded(self):
"""Information flow analysis: infoflows with excluded source type.""" """Information flow analysis: infoflows with excluded source type."""
self.a.set_exclude(["node1"]) self.a.exclude = ["node1"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.infoflows("node1")) paths = list(self.a.infoflows("node1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
def test_942_infoflows_source_disconnected(self): def test_942_infoflows_source_disconnected(self):
"""Information flow analysis: infoflows with disconnected source type.""" """Information flow analysis: infoflows with disconnected source type."""
self.a.set_exclude(["disconnected2"]) self.a.exclude = ["disconnected2"]
self.a.set_min_weight(1) self.a.min_weight = 1
paths = list(self.a.infoflows("disconnected1")) paths = list(self.a.infoflows("disconnected1"))
self.assertEqual(0, len(paths)) self.assertEqual(0, len(paths))
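Tests 901 and 902 above fix the accepted range for the minimum flow weight: assigning anything below 1 or above 10 must raise ValueError at assignment time, while the earlier tests accept 1, 3, and 8. A property of roughly this shape satisfies that contract; the backing attribute name _min_weight is an assumption of the sketch, not necessarily what SETools uses internally.

class MinWeightSketch(object):

    """Range-checked minimum weight, as the tests above expect (sketch only)."""

    @property
    def min_weight(self):
        return self._min_weight

    @min_weight.setter
    def min_weight(self, value):
        # Valid information flow weights are 1-10 inclusive.
        if not 1 <= value <= 10:
            raise ValueError(
                "The minimum weight must be 1-10, got: {0}".format(value))
        self._min_weight = value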


@ -18,6 +18,7 @@
import unittest import unittest
from setools import SELinuxPolicy, MLSRuleQuery from setools import SELinuxPolicy, MLSRuleQuery
from setools.policyrep.exception import InvalidMLSRuleType
from . import mixins from . import mixins
@ -82,6 +83,7 @@ class MLSRuleQueryTest(mixins.ValidateRule, unittest.TestCase):
self.assertEqual(len(r), 1) self.assertEqual(len(r), 1)
self.validate_rule(r[0], "range_transition", "test12s", "test12aFAIL", "infoflow", "s2") self.validate_rule(r[0], "range_transition", "test12s", "test12aFAIL", "infoflow", "s2")
@unittest.skip("Setting tclass to a string is no longer supported.")
def test_020_class(self): def test_020_class(self):
"""MLS rule query with exact object class match.""" """MLS rule query with exact object class match."""
q = MLSRuleQuery(self.p, tclass="infoflow7", tclass_regex=False) q = MLSRuleQuery(self.p, tclass="infoflow7", tclass_regex=False)
@ -273,3 +275,8 @@ class MLSRuleQueryTest(mixins.ValidateRule, unittest.TestCase):
self.assertEqual(len(r), 1) self.assertEqual(len(r), 1)
self.validate_rule(r[0], "range_transition", "test45", "test45", "infoflow", self.validate_rule(r[0], "range_transition", "test45", "test45", "infoflow",
"s45:c1 - s45:c1.c3") "s45:c1 - s45:c1.c3")
def test_900_invalid_ruletype(self):
"""MLS rule query with invalid rule type."""
with self.assertRaises(InvalidMLSRuleType):
q = MLSRuleQuery(self.p, ruletype="type_transition")
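The new test_900 above depends on the constructor keywords being routed through the same validated attributes, so an invalid rule type is rejected when the query object is built rather than when results are generated. A usage sketch follows; the policy path is a placeholder, not a file from this repository.

from setools import SELinuxPolicy, MLSRuleQuery
from setools.policyrep.exception import InvalidMLSRuleType

p = SELinuxPolicy("policy.conf")  # placeholder policy path
try:
    MLSRuleQuery(p, ruletype="type_transition")  # a TE rule type, not an MLS one
except InvalidMLSRuleType:
    print("Rejected at construction time.")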


@ -135,6 +135,18 @@ role test11t3;
allow test11s test11t1; allow test11s test11t1;
role_transition test11s system:infoflow test11t3; role_transition test11s system:infoflow test11t3;
# test 12
# ruletype: unset
# source: unset
# target: test12t
# class: unset
# default: unset
role test12s;
type test12t;
role test12d;
allow test12s test12d;
role_transition test12s test12t:infoflow test12d;
# test 20 # test 20
# ruletype: unset # ruletype: unset
# source: unset # source: unset


@ -90,6 +90,15 @@ class RBACRuleQueryTest(mixins.ValidateRule, unittest.TestCase):
self.assertEqual(len(r), 1) self.assertEqual(len(r), 1)
self.validate_allow(r[0], "test11s", "test11t1") self.validate_allow(r[0], "test11s", "test11t1")
def test_012_target_type(self):
"""RBAC rule query with a type as target."""
q = RBACRuleQuery(self.p, target="test12t")
r = sorted(q.results())
self.assertEqual(len(r), 1)
self.validate_rule(r[0], "role_transition", "test12s", "test12t", "infoflow", "test12d")
@unittest.skip("Setting tclass to a string is no longer supported.")
def test_020_class(self): def test_020_class(self):
"""RBAC rule query with exact object class match.""" """RBAC rule query with exact object class match."""
q = RBACRuleQuery(self.p, tclass="infoflow2", tclass_regex=False) q = RBACRuleQuery(self.p, tclass="infoflow2", tclass_regex=False)
@ -146,4 +155,4 @@ class RBACRuleQueryTest(mixins.ValidateRule, unittest.TestCase):
# this will have to be updated as number of # this will have to be updated as number of
# role allows change in the test policy # role allows change in the test policy
self.assertEqual(num, 8) self.assertEqual(num, 9)


@ -117,6 +117,7 @@ class TERuleQueryTest(mixins.ValidateRule, unittest.TestCase):
self.validate_rule(r[0], "allow", "test8a1", "test8a1", "infoflow", set(["hi_w"])) self.validate_rule(r[0], "allow", "test8a1", "test8a1", "infoflow", set(["hi_w"]))
self.validate_rule(r[1], "allow", "test8a2", "test8a2", "infoflow", set(["low_r"])) self.validate_rule(r[1], "allow", "test8a2", "test8a2", "infoflow", set(["low_r"]))
@unittest.skip("Setting tclass to a string is no longer supported.")
def test_009_class(self): def test_009_class(self):
"""TE rule query with exact object class match.""" """TE rule query with exact object class match."""
q = TERuleQuery(self.p, tclass="infoflow2", tclass_regex=False) q = TERuleQuery(self.p, tclass="infoflow2", tclass_regex=False)


@ -89,7 +89,7 @@ class TypeQueryTest(unittest.TestCase):
def test_030_permissive(self): def test_030_permissive(self):
"""Type query with permissive match""" """Type query with permissive match"""
q = TypeQuery(self.p, match_permissive=True, permissive=True) q = TypeQuery(self.p, permissive=True)
types = sorted(str(t) for t in q.results()) types = sorted(str(t) for t in q.results())
self.assertListEqual(["test30"], types) self.assertListEqual(["test30"], types)
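The TypeQuery change above drops the separate match_permissive flag: passing permissive=True now both enables the criterion and sets it. A short usage sketch, again with a placeholder policy path:

from setools import SELinuxPolicy, TypeQuery

p = SELinuxPolicy("policy.conf")     # placeholder policy path
q = TypeQuery(p, permissive=True)    # no separate match_permissive flag needed
print(sorted(str(t) for t in q.results()))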