Package libxyz :: Package core :: Module fsrule

Source Code for Module libxyz.core.fsrule

# -*- coding: utf-8 -*-
#
# Max E. Kuznecov <syhpoon@syhpoon.name> 2008
#
# This file is part of XYZCommander.
# XYZCommander is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# XYZCommander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
# You should have received a copy of the GNU Lesser Public License
# along with XYZCommander. If not, see <http://www.gnu.org/licenses/>.

import stat
import pwd
import grp
import re

import libxyz.parser as parser

from libxyz.exceptions import XYZValueError
from libxyz.exceptions import LexerError
from libxyz.exceptions import FSRuleError
from libxyz.vfs.vfsobj import VFSFile
from libxyz.vfs.types import *
from libxyz.core.utils import ustring

class FSRule(parser.BaseParser):
    """
    FS rule parser

    Rule syntax is as follows:

    rule ::= expr $
           | expr op rule
    expr ::= expr_body
           | NOT expr_body
           | "(" rule ")"
    expr_body ::= ftype "{" ARG "}"
    op ::= AND | OR
    ftype ::= TYPE | PERM | OWNER | NAME | SIZE
            | LINK_TYPE | LINK_PERM | LINK_OWNER | LINK_NAME
            | LINK_EXISTS | LINK_SIZE

    Examples:

    type{file} and perm{+0111}
    (owner{user} and not owner{:group}) or owner{root}
    """

    # Tokens
    TOKEN_TYPE = "type"
    TOKEN_PERM = "perm"
    TOKEN_OWNER = "owner"
    TOKEN_NAME = "name"
    TOKEN_INAME = "iname"
    TOKEN_SIZE = "size"
    TOKEN_LINK_TYPE = "link_type"
    TOKEN_LINK_PERM = "link_perm"
    TOKEN_LINK_OWNER = "link_owner"
    TOKEN_LINK_NAME = "link_name"
    TOKEN_LINK_INAME = "link_iname"
    TOKEN_LINK_EXISTS = "link_exists"
    TOKEN_LINK_SIZE = "link_size"
    TOKEN_AND = "and"
    TOKEN_OR = "or"
    TOKEN_NOT = "not"
    TOKEN_OPEN_BR = "{"
    TOKEN_CLOSE_BR = "}"
    TOKEN_OPEN_PAR = "("
    TOKEN_CLOSE_PAR = ")"
    TOKEN_DEFAULT = True
    TOKEN_ARG = False
    EOF = None

    TOKENS_EXTENDED = []
    TRANSFORM_EXTENDED = {}

    TOKENS = [TOKEN_TYPE, TOKEN_PERM, TOKEN_OWNER, TOKEN_NAME, TOKEN_INAME,
              TOKEN_LINK_TYPE, TOKEN_LINK_PERM, TOKEN_LINK_OWNER,
              TOKEN_LINK_NAME, TOKEN_LINK_INAME, TOKEN_LINK_EXISTS,
              TOKEN_AND, TOKEN_OR, TOKEN_NOT, TOKEN_OPEN_BR, TOKEN_CLOSE_BR,
              TOKEN_OPEN_PAR, TOKEN_CLOSE_PAR, TOKEN_DEFAULT,
              TOKEN_SIZE, TOKEN_LINK_SIZE, EOF]

    # Nonterminals
    NTOKEN_START = 100
    NTOKEN_RULE = 101
    NTOKEN_EXPR = 102
    NTOKEN_EXPR_BODY = 103
    NTOKEN_OP = 104
    NTOKEN_FTYPE = 105

    FTYPE = [TOKEN_TYPE,
             TOKEN_PERM,
             TOKEN_OWNER,
             TOKEN_NAME,
             TOKEN_INAME,
             TOKEN_SIZE,
             TOKEN_LINK_TYPE,
             TOKEN_LINK_PERM,
             TOKEN_LINK_OWNER,
             TOKEN_LINK_NAME,
             TOKEN_LINK_INAME,
             TOKEN_LINK_EXISTS,
             TOKEN_LINK_SIZE,
             ]

    INFIX_OP = (TOKEN_AND, TOKEN_OR)

    @classmethod
    def extend(cls, token, trans_func, match_func):
        """
        Extend FSRule parser with new expressions
        @param token: new token expression
        @param trans_func: Transformation function
        @param match_func: Match function
        """

        if token in cls.TOKENS_EXTENDED or token in cls.TOKENS or \
           token in cls.FTYPE:
            raise FSRuleError(_(u"Error extending FSRule: "\
                                u"token %s already registered") % token)

        if not callable(trans_func) or not callable(match_func):
            raise FSRuleError(_(u"Error extending FSRule: "\
                                u"trans_func and match_func arguments "\
                                u"must be functions."))

        # 1. Append token to lists
        cls.TOKENS_EXTENDED.append(token)
        cls.TOKENS.append(token)
        cls.FTYPE.append(token)

        # 2. Add transformation func
        cls.TRANSFORM_EXTENDED[token] = trans_func

        # 3. Add match func
        Expression.extend(token, match_func)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    @classmethod
    def unextend(cls, token):
        """
        Remove extended expression from parser
        """

        if token not in cls.TOKENS_EXTENDED:
            return False

        try:
            cls.TOKENS_EXTENDED.remove(token)
        except ValueError:
            pass

        try:
            cls.TOKENS.remove(token)
        except ValueError:
            pass

        try:
            cls.FTYPE.remove(token)
        except ValueError:
            pass

        try:
            del(cls.TRANSFORM_EXTENDED[token])
        except KeyError:
            pass

        return Expression.unextend(token)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __init__(self, rule):
        """
        @param rule: String rule
        """

        super(FSRule, self).__init__()

        self._stack = []
        self._done = False
        self._cur_obj = None
        self._expressions = parser.lr.Tree()
        self._exp_pointer = self._expressions
        self._exp_stack = []

        # Action table
        self._action = parser.lr.ActionTable()

        _s = self._shift
        _r = self._reduce

        self._action.add(0, self.TOKEN_TYPE, (_s, 2))
        self._action.add(0, self.TOKEN_PERM, (_s, 3))
        self._action.add(0, self.TOKEN_OWNER, (_s, 4))
        self._action.add(0, self.TOKEN_NAME, (_s, 5))
        self._action.add(0, self.TOKEN_INAME, (_s, 5))
        self._action.add(0, self.TOKEN_SIZE, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(0, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(0, self.TOKEN_NOT, (_s, 1))
        self._action.add(0, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(1, self.TOKEN_TYPE, (_s, 2))
        self._action.add(1, self.TOKEN_PERM, (_s, 3))
        self._action.add(1, self.TOKEN_OWNER, (_s, 4))
        self._action.add(1, self.TOKEN_NAME, (_s, 5))
        self._action.add(1, self.TOKEN_INAME, (_s, 5))
        self._action.add(1, self.TOKEN_SIZE, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(1, self.TOKEN_LINK_SIZE, (_s, 27))

        self._action.add(2, self.TOKEN_DEFAULT, (_r, 10))
        self._action.add(3, self.TOKEN_DEFAULT, (_r, 11))
        self._action.add(4, self.TOKEN_DEFAULT, (_r, 12))
        self._action.add(5, self.TOKEN_DEFAULT, (_r, 13))

        self._action.add(6, self.TOKEN_TYPE, (_s, 2))
        self._action.add(6, self.TOKEN_PERM, (_s, 3))
        self._action.add(6, self.TOKEN_OWNER, (_s, 4))
        self._action.add(6, self.TOKEN_NAME, (_s, 5))
        self._action.add(6, self.TOKEN_INAME, (_s, 5))
        self._action.add(6, self.TOKEN_SIZE, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(6, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(6, self.TOKEN_NOT, (_s, 1))
        self._action.add(6, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(7, self.EOF, (_s, 14))
        self._action.add(8, self.TOKEN_DEFAULT, (_r, 1))

        self._action.add(9, self.TOKEN_AND, (_s, 15))
        self._action.add(9, self.TOKEN_OR, (_s, 16))
        self._action.add(9, self.TOKEN_DEFAULT, (_r, 2))

        self._action.add(10, self.TOKEN_DEFAULT, (_r, 4))
        self._action.add(11, self.TOKEN_OPEN_BR, (_s, 18))
        self._action.add(12, self.TOKEN_DEFAULT, (_r, 5))
        self._action.add(13, self.TOKEN_CLOSE_PAR, (_s, 19))
        self._action.add(14, self.TOKEN_DEFAULT, (self._accept, None))
        self._action.add(15, self.TOKEN_DEFAULT, (_r, 8))
        self._action.add(16, self.TOKEN_DEFAULT, (_r, 9))

        self._action.add(17, self.TOKEN_TYPE, (_s, 2))
        self._action.add(17, self.TOKEN_PERM, (_s, 3))
        self._action.add(17, self.TOKEN_OWNER, (_s, 4))
        self._action.add(17, self.TOKEN_NAME, (_s, 5))
        self._action.add(17, self.TOKEN_INAME, (_s, 5))
        self._action.add(17, self.TOKEN_SIZE, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_TYPE, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_PERM, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_OWNER, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_NAME, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_INAME, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_EXISTS, (_s, 27))
        self._action.add(17, self.TOKEN_LINK_SIZE, (_s, 27))
        self._action.add(17, self.TOKEN_NOT, (_s, 1))
        self._action.add(17, self.TOKEN_OPEN_PAR, (_s, 6))

        self._action.add(18, self.TOKEN_ARG, (_s, 21))
        self._action.add(19, self.TOKEN_DEFAULT, (_r, 6))
        self._action.add(20, self.TOKEN_DEFAULT, (_r, 3))
        self._action.add(21, self.TOKEN_CLOSE_BR, (_s, 22))
        self._action.add(22, self.TOKEN_DEFAULT, (_r, 7))
        self._action.add(23, self.TOKEN_OPEN_BR, (_s, 24))
        self._action.add(24, self.TOKEN_ARG, (_s, 25))
        self._action.add(25, self.TOKEN_CLOSE_BR, (_s, 26))
        self._action.add(26, self.TOKEN_DEFAULT, (_r, 14))
        self._action.add(27, self.TOKEN_DEFAULT, (_r, 131))

        # For extended functionality
        for _ext_token in self.TOKENS_EXTENDED:
            for _state in (0, 1, 6, 17):
                self._action.add(_state, _ext_token, (_s, 27))

        self._rules = parser.lr.Rules()

        self._rules.add(1, self.NTOKEN_START, 1)
        self._rules.add(2, self.NTOKEN_RULE, 1)
        self._rules.add(3, self.NTOKEN_RULE, 3)
        self._rules.add(4, self.NTOKEN_EXPR, 1)
        self._rules.add(5, self.NTOKEN_EXPR, 2)
        self._rules.add(6, self.NTOKEN_EXPR, 3)
        self._rules.add(7, self.NTOKEN_EXPR_BODY, 4)
        self._rules.add(8, self.NTOKEN_OP, 1)
        self._rules.add(9, self.NTOKEN_OP, 1)
        self._rules.add(10, self.NTOKEN_FTYPE, 1)
        self._rules.add(11, self.NTOKEN_FTYPE, 1)
        self._rules.add(12, self.NTOKEN_FTYPE, 1)
        self._rules.add(13, self.NTOKEN_FTYPE, 1)
        self._rules.add(14, self.NTOKEN_EXPR_BODY, 5)
        self._rules.add(131, self.NTOKEN_FTYPE, 1)

        # Goto table
        self._goto = parser.lr.GotoTable()

        self._goto.add(0, self.NTOKEN_START, 7)
        self._goto.add(0, self.NTOKEN_RULE, 8)
        self._goto.add(0, self.NTOKEN_EXPR, 9)
        self._goto.add(0, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(0, self.NTOKEN_FTYPE, 11)

        self._goto.add(1, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(1, self.NTOKEN_FTYPE, 23)

        self._goto.add(6, self.NTOKEN_RULE, 13)
        self._goto.add(6, self.NTOKEN_EXPR, 9)
        self._goto.add(6, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(6, self.NTOKEN_FTYPE, 11)

        self._goto.add(9, self.NTOKEN_OP, 17)

        self._goto.add(17, self.NTOKEN_RULE, 20)
        self._goto.add(17, self.NTOKEN_EXPR, 9)
        self._goto.add(17, self.NTOKEN_EXPR_BODY, 10)
        self._goto.add(17, self.NTOKEN_FTYPE, 11)

        self._unget = []
        self._chain = self._parse(rule)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def match(self, obj):
        """
        Match given object against rule

        @param obj: VFSFile instance
        @return: True if matches and False otherwise
        """

        if not isinstance(obj, VFSFile):
            raise XYZValueError(_(u"Invalid argument type: %s, "\
                                  u"VFSFile expected") % type(obj))

        return self._match(obj, self._expressions)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _match(self, obj, _expressions):
        _op = None
        _res = None

        for exp in _expressions:
            if exp in ("AND", "OR"):
                _op = exp
                continue

            if isinstance(exp, parser.lr.Tree):
                # Recursive match subrule
                _r = self._match(obj, exp)
            else:
                _r = exp.match(obj)

            if _res is not None:
                if _op == "AND":
                    _res = _res and _r

                    # Short-circuit: do not continue if got false on AND
                    # expression
                    if not _res:
                        break
                elif _op == "OR":
                    _res = _res or _r

                    # Short-circuit: do not continue if got true on OR
                    # expression
                    if _res:
                        break
            else:
                _res = _r

            _op = None

        if _res is None:
            return _r
        else:
            return _res

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _parse(self, rule):
        """
        Parse rule
        """

        # Initial state
        self._stack.append(0)

        _tokens = (self.TOKEN_OPEN_PAR,
                   self.TOKEN_CLOSE_PAR,
                   self.TOKEN_OPEN_BR,
                   self.TOKEN_CLOSE_BR,
                   u"=", u",")

        self._lexer = parser.Lexer(rule, _tokens, u"#")
        self._lexer.escaping_on()

        try:
            while True:
                if self._done:
                    break

                if self._unget:
                    _tok = self._unget.pop()
                else:
                    _res = self._lexer.lexer()

                    if _res is not None:
                        _tok = _res[1]
                    else:
                        _tok = _res

                if _tok not in self.TOKENS:
                    _tok_type = self.TOKEN_ARG
                else:
                    _tok_type = _tok

                try:
                    _f, _arg = self._action.get(self._stack[-1], _tok_type)
                except KeyError:
                    try:
                        _f, _arg = self._action.get(self._stack[-1],
                                                    self.TOKEN_DEFAULT)
                    except KeyError:
                        self.error(_tok)

                _f(_tok, _arg)

        except LexerError, e:
            self.error(e)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _shift(self, token, state):
        """
        Shift token and state onto stack
        """

        self._stack.append(token)
        self._stack.append(state)

        if state == 6: # (
            _new = parser.lr.Tree()
            self._exp_pointer.add(_new)
            self._exp_stack.append(self._exp_pointer)
            self._exp_pointer = _new
        elif state == 19: # )
            if self._exp_stack:
                self._exp_pointer = self._exp_stack.pop()

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _reduce(self, token, rule):
        """
        Reduce stack by rule
        """
        _transform = {
            u"type": self._type,
            u"name": self._name,
            u"iname": self._iname,
            u"owner": self._owner,
            u"perm": self._perm,
            u"size": self._size,
            u"link_type": self._type,
            u"link_name": self._name,
            u"link_iname": self._iname,
            u"link_owner": self._owner,
            u"link_perm": self._perm,
            u"link_size": self._size,
            }

        try:
            _ntok, _len = self._rules.get(rule)
        except KeyError:
            self.error(token)

        if rule in (10, 11, 12, 13, 131):
            self._cur_obj = Expression()
            self._cur_obj.otype = self._stack[-2]
        elif rule in (7, 14):
            _arg = self._stack[-4]
            _cur = self._cur_obj

            if _cur.otype in _transform:
                _cur.arg = _transform[_cur.otype](_arg)
            elif _cur.otype in self.TRANSFORM_EXTENDED:
                try:
                    _cur.arg = self.TRANSFORM_EXTENDED[_cur.otype](_arg)
                except Exception, e:
                    self.error(_(u"Error in calling extended transformation "\
                                 u"function: %s") % ustring(str(e)))
            else:
                _cur.arg = _arg

            if rule == 14:
                self._cur_obj.negative = True
        elif rule in (4, 5):
            self._exp_pointer.add(self._cur_obj)
            self._cur_obj = None
        elif rule == 8:
            self._exp_pointer.add("AND")
        elif rule == 9:
            self._exp_pointer.add("OR")

        self._stack = self._stack[:(_len * -2)]
        _top = self._stack[-1]
        self._stack.append(_ntok)

        try:
            self._stack.append(self._goto.get(_top, _ntok))
        except KeyError:
            self.error(token)

        self._unget.append(token)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _accept(self, *args):
        """
        Complete parsing
        """

        self._done = True

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _type(self, arg):
        _types = {
            u"file": VFSTypeFile,
            u"dir": VFSTypeDir,
            u"link": VFSTypeLink,
            u"socket": VFSTypeSocket,
            u"fifo": VFSTypeFifo,
            u"char": VFSTypeChar,
            u"block": VFSTypeBlock,
            }

        try:
            return _types[arg]
        except KeyError:
            self.error(_(u"Invalid type{} argument: %s" % arg))

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _name(self, arg):
        return re.compile(arg, re.U)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _iname(self, arg):
        return re.compile(arg, re.U | re.I)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _owner(self, arg):
        if not re.match(r"^(\w+)?(:(\w+))?$", arg):
            self.error(_(u"Invalid owner{} argument: %s" % arg))

        _tmp = arg.split(":")
        _uid = _tmp[0]

        if _uid == "":
            _uid = None
        elif not _uid.isdigit():
            try:
                _uid = pwd.getpwnam(_uid).pw_uid
            except (KeyError, TypeError):
                self.error(_(u"Invalid uid: %s" % _uid))
        else:
            _uid = int(_uid)

        if len(_tmp) > 1:
            _gid = _tmp[1]

            if not _gid.isdigit():
                try:
                    _gid = grp.getgrnam(_gid).gr_gid
                except (KeyError, TypeError):
                    self.error(_(u"Invalid gid: %s" % _gid))
            else:
                _gid = int(_gid)
        else:
            _gid = None

        return (_uid, _gid)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _perm(self, arg):
        _any = False

        if not re.match(r"^\+?\d{4}$", arg):
            self.error(_(u"Invalid perm{} argument: %s" % arg))

        if arg.startswith(u"+"):
            _any = True
            _perm = int(arg[1:], 8)
        else:
            _perm = int(arg, 8)

        return (_any, _perm)

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def _size(self, arg):
        _bytes = {
            u"B": 1,
            u"K": 1024,
            u"M": 1024 * 1024,
            u"G": 1024 * 1024 * 1024,
            u"T": 1024 * 1024 * 1024 * 1024,
            }

        _re = re.match(r"^\s*([<>]?\=?)\s*(\d+)\s*([BbKkMmGgTt]?)\s*$", arg)

        if _re is None:
            self.error(_(u"Invalid size{} argument: %s") % arg)
        else:
            _op = _re.group(1) or u"="
            _size = long(_re.group(2))
            _mod = _re.group(3) or None

            if _mod is not None:
                _size *= _bytes[_mod.upper()]

            return (_op, _size)

    # ... (the definition of the link() helper used below is missing from
    # this listing; only its trailing line survives) ...
    return _trans

#++++++++++++++++++++++++++++++++++++++++++++++++

class Expression(object):
    """
    FS rule expression class
    """

    MATCH_EXTENDED = {}

    @classmethod
    def extend(cls, token, match_func):
        cls.MATCH_EXTENDED[token] = match_func

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    @classmethod
    def unextend(cls, token):
        try:
            del(cls.MATCH_EXTENDED[token])
        except KeyError:
            pass

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __init__(self):
        self.otype = None
        self.arg = None
        self.negative = False

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def match(self, vfsobj):
        """
        Check if object matches the rule
        """

        def _match_type(obj, arg):
            return isinstance(obj.ftype, arg)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_name(obj, arg):
            if arg.search(obj.name) is None:
                return False
            else:
                return True

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_iname(obj, arg):
            if arg.search(obj.name) is None:
                return False
            else:
                return True

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_owner(obj, arg):
            if arg[0] is not None and arg[1] is not None:
                if (obj.uid, obj.gid) == arg:
                    return True
            elif arg[0] is not None and obj.uid == arg[0]:
                return True
            elif arg[1] is not None and obj.gid == arg[1]:
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_perm(obj, arg):
            if obj.mode is None:
                return False

            _any, _m = arg
            _mode = stat.S_IMODE(obj.mode.raw)

            if not _any and _mode == _m:
                return True
            elif _mode & _m:
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_size(obj, args):
            if obj.size is None:
                return False

            _op, _size = args

            _data = {u">": lambda x, y: x > y,
                     u">=": lambda x, y: x >= y,
                     u"<": lambda x, y: x < y,
                     u"<=": lambda x, y: x <= y,
                     u"=": lambda x, y: x == y,
                     }

            if _op in _data and _data[_op](obj.size, _size):
                return True

            return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        def _match_link_exists(obj, arg):
            if isinstance(obj.ftype, VFSTypeLink) and obj.data is not None:
                return True
            else:
                return False

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        _match_link_type = link(_match_type)
        _match_link_name = link(_match_name)
        _match_link_iname = link(_match_iname)
        _match_link_owner = link(_match_owner)
        _match_link_perm = link(_match_perm)
        _match_link_size = link(_match_size)

        #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

        _match_f = {
            u"type": _match_type,
            u"name": _match_name,
            u"iname": _match_iname,
            u"owner": _match_owner,
            u"perm": _match_perm,
            u"size": _match_size,
            u"link_type": _match_link_type,
            u"link_name": _match_link_name,
            u"link_iname": _match_link_iname,
            u"link_owner": _match_link_owner,
            u"link_perm": _match_link_perm,
            u"link_exists": _match_link_exists,
            u"link_size": _match_link_size,
            }

        if self.otype in _match_f:
            _res = _match_f[self.otype](vfsobj, self.arg)
        elif self.otype in self.MATCH_EXTENDED:
            try:
                _res = self.MATCH_EXTENDED[self.otype](vfsobj, self.arg)
            except Exception, e:
                self.error(_(u"Error in calling extended match "\
                             u"function: %s") % ustring(str(e)))
        else:
            raise FSRuleError(_(u"Unable to find match function for token: %s")
                              % self.otype)

        if self.negative:
            return not _res
        else:
            return _res

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __str__(self):
        return "<FSRule expression: %s, %s, %s>" % \
               (self.otype, str(self.arg), str(self.negative))

    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    def __repr__(self):
        return self.__str__()
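
Usage example

The listing above ships no usage notes, so the following sketch is illustrative only and is not part of fsrule.py. It is written against the API visible above: a rule string in the grammar from the FSRule docstring, a match() call that requires a VFSFile instance, and an extension registered through FSRule.extend(). The vfs_obj variable and the "ext" token are assumptions made for the example.

from libxyz.core.fsrule import FSRule

# Select regular files that have any execute bit set and are not owned
# by root (see the grammar and examples in the FSRule docstring).
rule = FSRule(u"type{file} and perm{+0111} and not owner{root}")

# match() accepts only VFSFile instances; anything else raises
# XYZValueError.  vfs_obj is assumed to come from the libxyz VFS layer.
if rule.match(vfs_obj):
    print "matched"

# Registering a hypothetical "ext" expression.  The transformation function
# receives the raw argument written inside ext{...}; the match function
# receives the VFSFile object and the transformed argument.
FSRule.extend(u"ext",
              lambda arg: arg.lower(),
              lambda obj, arg: obj.name.lower().endswith(u"." + arg))

py_rule = FSRule(u"ext{py} or ext{pyc}")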