#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2018 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for the parser module."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import shutil
import tempfile
import unittest

import arch
import bpf
import parser  # pylint: disable=wrong-import-order

ARCH_64 = arch.Arch.load_from_json(
    os.path.join(
        os.path.dirname(os.path.abspath(__file__)), 'testdata/arch_64.json'))


class TokenizerTests(unittest.TestCase):
    """Tests for ParserState.tokenize."""

    @staticmethod
    def _tokenize(line):
        parser_state = parser.ParserState('<memory>')
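        # tokenize() produces one token list per input line, so wrapping the
        # single line in a list and taking element [0] yields the tokens for
        # just that line.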
        return list(parser_state.tokenize([line]))[0]

    def test_tokenize(self):
        """Accept valid tokens."""
        self.assertEqual([
            (token.type, token.value)
            for token in TokenizerTests._tokenize('@include /minijail.policy')
        ], [
            ('INCLUDE', '@include'),
            ('PATH', '/minijail.policy'),
        ])
        self.assertEqual([
            (token.type, token.value)
            for token in TokenizerTests._tokenize('@include ./minijail.policy')
        ], [
            ('INCLUDE', '@include'),
            ('PATH', './minijail.policy'),
        ])
        self.assertEqual(
            [(token.type, token.value) for token in TokenizerTests._tokenize(
                'read: arg0 in ~0xffff || arg0 & (1|2) && arg0 == 0o755; '
                'return ENOSYS # ignored')], [
                    ('IDENTIFIER', 'read'),
                    ('COLON', ':'),
                    ('ARGUMENT', 'arg0'),
                    ('OP', 'in'),
                    ('BITWISE_COMPLEMENT', '~'),
                    ('NUMERIC_CONSTANT', '0xffff'),
                    ('OR', '||'),
                    ('ARGUMENT', 'arg0'),
                    ('OP', '&'),
                    ('LPAREN', '('),
                    ('NUMERIC_CONSTANT', '1'),
                    ('BITWISE_OR', '|'),
                    ('NUMERIC_CONSTANT', '2'),
                    ('RPAREN', ')'),
                    ('AND', '&&'),
                    ('ARGUMENT', 'arg0'),
                    ('OP', '=='),
                    ('NUMERIC_CONSTANT', '0o755'),
                    ('SEMICOLON', ';'),
                    ('RETURN', 'return'),
                    ('IDENTIFIER', 'ENOSYS'),
                ])
        # Ensure that tokens that have an otherwise valid token as prefix are
        # still matched correctly.
        self.assertEqual([
            (token.type, token.value)
            for token in TokenizerTests._tokenize(
                'inotify_wait return_sys killall trace_sys')
        ], [
            ('IDENTIFIER', 'inotify_wait'),
            ('IDENTIFIER', 'return_sys'),
            ('IDENTIFIER', 'killall'),
            ('IDENTIFIER', 'trace_sys'),
        ])

    def test_tokenize_invalid_token(self):
        """Reject tokenizer errors."""
        with self.assertRaisesRegex(parser.ParseException,
                                    (r'<memory>\(1:1\): invalid token\n'
                                     r'    %invalid-token%\n'
                                     r'    \^')):
            TokenizerTests._tokenize('%invalid-token%')


class ParseConstantTests(unittest.TestCase):
    """Tests for PolicyParser.parse_value."""

    def setUp(self):
        self.arch = ARCH_64
        self.parser = parser.PolicyParser(
            self.arch, kill_action=bpf.KillProcess())

    def _tokenize(self, line):
        # pylint: disable=protected-access
        return list(self.parser._parser_state.tokenize([line]))[0]

    def test_parse_constant_unsigned(self):
        """Accept reasonably-sized unsigned constants."""
        self.assertEqual(
            self.parser.parse_value(self._tokenize('0x80000000')), 0x80000000)
        if self.arch.bits == 64:
            self.assertEqual(
                self.parser.parse_value(self._tokenize('0x8000000000000000')),
                0x8000000000000000)

    def test_parse_constant_unsigned_too_big(self):
        """Reject unreasonably-sized unsigned constants."""
        if self.arch.bits == 32:
            with self.assertRaisesRegex(parser.ParseException,
                                        'unsigned overflow'):
                self.parser.parse_value(self._tokenize('0x100000000'))
        with self.assertRaisesRegex(parser.ParseException,
                                    'unsigned overflow'):
            self.parser.parse_value(self._tokenize('0x10000000000000000'))

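    # Negative constants are accepted and wrap to their two's-complement
    # unsigned equivalent for the target architecture, which is why -1 below
    # compares equal to self.arch.max_unsigned.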
    def test_parse_constant_signed(self):
        """Accept reasonably-sized signed constants."""
        self.assertEqual(
            self.parser.parse_value(self._tokenize('-1')),
            self.arch.max_unsigned)

    def test_parse_constant_signed_too_negative(self):
        """Reject unreasonably-sized signed constants."""
        if self.arch.bits == 32:
            with self.assertRaisesRegex(parser.ParseException,
                                        'signed underflow'):
                self.parser.parse_value(self._tokenize('-0x800000001'))
        with self.assertRaisesRegex(parser.ParseException, 'signed underflow'):
            self.parser.parse_value(self._tokenize('-0x8000000000000001'))

    def test_parse_mask(self):
        """Accept parsing a mask value."""
        self.assertEqual(
            self.parser.parse_value(self._tokenize('0x1|0x2|0x4|0x8')), 0xf)

    def test_parse_parenthesized_expressions(self):
        """Accept parsing parenthesized expressions."""
        bad_expressions = [
            '(1',
            '|(1)',
            '(1)|',
            '()',
            '(',
            '((',
            '(()',
            '(()1',
        ]
        for expression in bad_expressions:
            with self.assertRaises(parser.ParseException, msg=expression):
                self.parser.parse_value(self._tokenize(expression))

        bad_partial_expressions = [
            '1)',
            '(1)1',
            '1(0)',
        ]
        for expression in bad_partial_expressions:
            tokens = self._tokenize(expression)
            self.parser.parse_value(tokens)
            self.assertNotEqual(tokens, [])

        good_expressions = [
            '(3)',
            '(1)|2',
            '1|(2)',
            '(1)|(2)',
            '((3))',
            '0|(1|2)',
            '(0|1|2)',
        ]
        for expression in good_expressions:
            self.assertEqual(
                self.parser.parse_value(self._tokenize(expression)), 3)

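    # Bitwise complement is evaluated at the architecture's register width,
    # so ~0 equals the arch's max_unsigned value and the expected results
    # below differ between the 32-bit and 64-bit cases.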
    def test_parse_constant_complements(self):
        """Accept complementing constants."""
        self.assertEqual(
            self.parser.parse_value(self._tokenize('~0')),
            self.arch.max_unsigned)
        self.assertEqual(
            self.parser.parse_value(self._tokenize('~0|~0')),
            self.arch.max_unsigned)
        if self.arch.bits == 32:
            self.assertEqual(
                self.parser.parse_value(
                    self._tokenize('~0x005AF0FF|~0xFFA50FFF')), 0xFFFFFF00)
            self.assertEqual(
                self.parser.parse_value(
                    self._tokenize('0x0F|~(0x005AF000|0x00A50FFF)|0xF0')),
                0xFF0000FF)
        else:
            self.assertEqual(
                self.parser.parse_value(
                    self._tokenize('~0x00005A5AF0F0FFFF|~0xFFFFA5A50F0FFFFF')),
                0xFFFFFFFFFFFF0000)
            self.assertEqual(
                self.parser.parse_value(
                    self._tokenize(
                        '0x00FF|~(0x00005A5AF0F00000|0x0000A5A50F0FFFFF)|0xFF00'
                    )), 0xFFFF00000000FFFF)

    def test_parse_double_complement(self):
        """Reject double-complementing constants."""
        with self.assertRaisesRegex(parser.ParseException,
                                    'double complement'):
            self.parser.parse_value(self._tokenize('~~0'))

    def test_parse_empty_complement(self):
        """Reject complementing nothing."""
        with self.assertRaisesRegex(parser.ParseException, 'empty complement'):
            self.parser.parse_value(self._tokenize('0|~'))

    def test_parse_named_constant(self):
        """Accept parsing a named constant."""
        self.assertEqual(
            self.parser.parse_value(self._tokenize('O_RDONLY')), 0)

    def test_parse_empty_constant(self):
        """Reject parsing nothing."""
        with self.assertRaisesRegex(parser.ParseException, 'empty constant'):
            self.parser.parse_value([])
        with self.assertRaisesRegex(parser.ParseException, 'empty constant'):
            self.parser.parse_value(self._tokenize('0|'))

    def test_parse_invalid_constant(self):
        """Reject parsing invalid constants."""
        with self.assertRaisesRegex(parser.ParseException, 'invalid constant'):
            self.parser.parse_value(self._tokenize('foo'))


class ParseFilterExpressionTests(unittest.TestCase):
    """Tests for PolicyParser.parse_argument_expression."""

    def setUp(self):
        self.arch = ARCH_64
        self.parser = parser.PolicyParser(
            self.arch, kill_action=bpf.KillProcess())

    def _tokenize(self, line):
        # pylint: disable=protected-access
        return list(self.parser._parser_state.tokenize([line]))[0]

    def test_parse_argument_expression(self):
        """Accept valid argument expressions."""
        self.assertEqual(
            self.parser.parse_argument_expression(
                self._tokenize(
                    'arg0 in 0xffff || arg0 == PROT_EXEC && arg1 == PROT_WRITE'
                )), [
                    [parser.Atom(0, 'in', 0xffff)],
                    [parser.Atom(0, '==', 4),
                     parser.Atom(1, '==', 2)],
                ])

    def test_parse_empty_argument_expression(self):
        """Reject empty argument expressions."""
        with self.assertRaisesRegex(parser.ParseException,
                                    'empty argument expression'):
            self.parser.parse_argument_expression(
                self._tokenize('arg0 in 0xffff ||'))

    def test_parse_empty_clause(self):
        """Reject empty clause."""
        with self.assertRaisesRegex(parser.ParseException, 'empty clause'):
            self.parser.parse_argument_expression(
                self._tokenize('arg0 in 0xffff &&'))

    def test_parse_invalid_argument(self):
        """Reject invalid argument."""
        with self.assertRaisesRegex(parser.ParseException, 'invalid argument'):
            self.parser.parse_argument_expression(
                self._tokenize('argX in 0xffff'))

    def test_parse_invalid_operator(self):
        """Reject invalid operator."""
        with self.assertRaisesRegex(parser.ParseException, 'invalid operator'):
            self.parser.parse_argument_expression(
                self._tokenize('arg0 = 0xffff'))

    def test_parse_missing_operator(self):
        """Reject missing operator."""
        with self.assertRaisesRegex(parser.ParseException, 'missing operator'):
            self.parser.parse_argument_expression(self._tokenize('arg0'))

    def test_parse_missing_operand(self):
        """Reject missing operand."""
        with self.assertRaisesRegex(parser.ParseException, 'empty constant'):
            self.parser.parse_argument_expression(self._tokenize('arg0 =='))


class ParseFilterTests(unittest.TestCase):
    """Tests for PolicyParser.parse_filter."""

    def setUp(self):
        self.arch = ARCH_64
        self.parser = parser.PolicyParser(
            self.arch, kill_action=bpf.KillProcess())

    def _tokenize(self, line):
        # pylint: disable=protected-access
        return list(self.parser._parser_state.tokenize([line]))[0]

    def test_parse_filter(self):
        """Accept valid filters."""
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('arg0 == 0')), [
                parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('kill-process')), [
                parser.Filter(None, bpf.KillProcess()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('kill-thread')), [
                parser.Filter(None, bpf.KillThread()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('trap')), [
                parser.Filter(None, bpf.Trap()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('return ENOSYS')), [
                parser.Filter(None,
                              bpf.ReturnErrno(self.arch.constants['ENOSYS'])),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('trace')), [
                parser.Filter(None, bpf.Trace()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('user-notify')), [
                parser.Filter(None, bpf.UserNotify()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('log')), [
                parser.Filter(None, bpf.Log()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('allow')), [
                parser.Filter(None, bpf.Allow()),
            ])
        self.assertEqual(
            self.parser.parse_filter(self._tokenize('1')), [
                parser.Filter(None, bpf.Allow()),
            ])
        self.assertEqual(
            self.parser.parse_filter(
                self._tokenize(
                    '{ arg0 == 0, arg0 == 1; return ENOSYS, trap }')),
            [
                parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                parser.Filter([[parser.Atom(0, '==', 1)]],
                              bpf.ReturnErrno(self.arch.constants['ENOSYS'])),
                parser.Filter(None, bpf.Trap()),
            ])

    def test_parse_missing_return_value(self):
        """Reject missing return value."""
        with self.assertRaisesRegex(parser.ParseException,
                                    'missing return value'):
            self.parser.parse_filter(self._tokenize('return'))

    def test_parse_invalid_return_value(self):
        """Reject invalid return value."""
        with self.assertRaisesRegex(parser.ParseException, 'invalid constant'):
            self.parser.parse_filter(self._tokenize('return arg0'))

    def test_parse_unclosed_brace(self):
        """Reject unclosed brace."""
        with self.assertRaisesRegex(parser.ParseException, 'unclosed brace'):
            self.parser.parse_filter(self._tokenize('{ allow'))


class ParseFilterStatementTests(unittest.TestCase):
    """Tests for PolicyParser.parse_filter_statement."""

    def setUp(self):
        self.arch = ARCH_64
        self.parser = parser.PolicyParser(
            self.arch, kill_action=bpf.KillProcess())

    def _tokenize(self, line):
        # pylint: disable=protected-access
        return list(self.parser._parser_state.tokenize([line]))[0]

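    # The expected ParsedFilterStatement values below are built with
    # token=None, while real parse results carry the token that introduced
    # the statement, so this helper compares only the syscalls and filters
    # fields.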
    def assertEqualIgnoringToken(self, actual, expected, msg=None):
        """Similar to assertEqual, but ignores the token field."""
        if (actual.syscalls != expected.syscalls or
                actual.filters != expected.filters):
            self.fail(
                self._formatMessage(msg, '%r != %r' % (actual, expected)))

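    # The io@libc and file-io@systemd forms below exercise syscall groups,
    # which are assumed to be defined in testdata/arch_64.json and to expand
    # to read and write on this test architecture.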
    def test_parse_filter_statement(self):
        """Accept valid filter statements."""
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('read: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(parser.Syscall('read', 0), ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('{read, write}: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('read', 0),
                    parser.Syscall('write', 1),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('io@libc: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('read', 0),
                    parser.Syscall('write', 1),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('file-io@systemd: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('read', 0),
                    parser.Syscall('write', 1),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('kill: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('kill', 62),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))

    def test_parse_metadata(self):
        """Accept valid filter statements with metadata."""
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize('read[arch=test]: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('read', 0),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))
        self.assertEqualIgnoringToken(
            self.parser.parse_filter_statement(
                self._tokenize(
                    '{read, nonexistent[arch=nonexistent]}: arg0 == 0')),
            parser.ParsedFilterStatement(
                syscalls=(
                    parser.Syscall('read', 0),
                ),
                filters=[
                    parser.Filter([[parser.Atom(0, '==', 0)]], bpf.Allow()),
                ],
                token=None))

    def test_parse_unclosed_brace(self):
        """Reject unclosed brace."""
        with self.assertRaisesRegex(parser.ParseException, 'unclosed brace'):
            self.parser.parse_filter(self._tokenize('{ allow'))

    def test_parse_invalid_syscall_group(self):
        """Reject invalid syscall groups."""
        with self.assertRaisesRegex(parser.ParseException, 'unclosed brace'):
            self.parser.parse_filter_statement(
                self._tokenize('{ read, write: arg0 == 0'))

    def test_parse_missing_colon(self):
        """Reject missing colon."""
        with self.assertRaisesRegex(parser.ParseException, 'missing colon'):
            self.parser.parse_filter_statement(self._tokenize('read'))

    def test_parse_invalid_colon(self):
        """Reject invalid colon."""
        with self.assertRaisesRegex(parser.ParseException, 'invalid colon'):
            self.parser.parse_filter_statement(self._tokenize('read arg0'))

    def test_parse_missing_filter(self):
        """Reject missing filter."""
        with self.assertRaisesRegex(parser.ParseException, 'missing filter'):
            self.parser.parse_filter_statement(self._tokenize('read:'))


class ParseFileTests(unittest.TestCase):
    """Tests for PolicyParser.parse_file."""

    def setUp(self):
        self.arch = ARCH_64
        self.parser = parser.PolicyParser(
            self.arch, kill_action=bpf.KillProcess())
        self.tempdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.tempdir)

    def _write_file(self, filename, contents):
        """Helper to write out a file for testing."""
        path = os.path.join(self.tempdir, filename)
        with open(path, 'w') as outf:
            outf.write(contents)
        return path

    def test_parse_simple(self):
        """Allow simple policy files."""
        path = self._write_file(
            'test.policy', """
            # Comment.
            read: allow
            write: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                    parser.FilterStatement(
                        syscall=parser.Syscall('write', 1),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

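    # The backslash after "read:" continues the statement onto the next line;
    # the parsed result is expected to match test_parse_simple.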
    def test_parse_multiline(self):
        """Allow simple multi-line policy files."""
        path = self._write_file(
            'test.policy', """
            # Comment.
            read: \
                allow
            write: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                    parser.FilterStatement(
                        syscall=parser.Syscall('write', 1),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

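    # @default replaces the kill_action passed to PolicyParser as the
    # policy's default action; permissive actions such as log are rejected,
    # as the second test below checks.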
    def test_parse_default(self):
        """Allow defining a default action."""
        path = self._write_file(
            'test.policy', """
            @default kill-thread
            read: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillThread(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

    def test_parse_default_permissive(self):
        """Reject defining a permissive default action."""
        path = self._write_file(
            'test.policy', """
            @default log
            read: allow
        """)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'invalid permissive default action'):
            self.parser.parse_file(path)

    def test_parse_simple_grouped(self):
        """Allow simple policy files."""
        path = self._write_file(
            'test.policy', """
            # Comment.
            {read, write}: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                    parser.FilterStatement(
                        syscall=parser.Syscall('write', 1),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

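    # The [arch=...] metadata restricts an entry to the named architecture,
    # so the read[arch=nonexistent] line is dropped on this arch and only the
    # write statement appears in the parsed policy.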
    def test_parse_other_arch(self):
        """Allow entries that only target another architecture."""
        path = self._write_file(
            'test.policy', """
            # Comment.
            read[arch=nonexistent]: allow
            write: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('write', 1),
                        frequency=1,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

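    # @include merges the included file's filters ahead of the including
    # file's own: read keeps the conditional allow plus its "return ENOSYS",
    # while write gets the conditional allow with the default kill-process
    # action appended as the catch-all.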
    def test_parse_include(self):
        """Allow including policy files."""
        path = self._write_file(
            'test.include.policy', """
            {read, write}: arg0 == 0; allow
        """)
        path = self._write_file(
            'test.policy', """
            @include ./test.include.policy
            read: return ENOSYS
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=1,
                        filters=[
                            parser.Filter([[parser.Atom(0, '==', 0)]],
                                          bpf.Allow()),
                            parser.Filter(
                                None,
                                bpf.ReturnErrno(
                                    self.arch.constants['ENOSYS'])),
                        ]),
                    parser.FilterStatement(
                        syscall=parser.Syscall('write', 1),
                        frequency=1,
                        filters=[
                            parser.Filter([[parser.Atom(0, '==', 0)]],
                                          bpf.Allow()),
                            parser.Filter(None, bpf.KillProcess()),
                        ]),
                ]))

    def test_parse_invalid_include(self):
        """Reject including invalid policy files."""
        with self.assertRaisesRegex(parser.ParseException,
                                    r'empty include path'):
            path = self._write_file(
                'test.policy', """
                @include
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'invalid include path'):
            path = self._write_file(
                'test.policy', """
                @include arg0
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'@include statement nested too deep'):
            path = self._write_file(
                'test.policy', """
                @include ./test.policy
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'Could not @include .*'):
            path = self._write_file(
                'test.policy', """
                @include ./nonexistent.policy
            """)
            self.parser.parse_file(path)

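    # @frequency points at a file mapping syscall names to expected call
    # counts; statements for syscalls listed there pick up that frequency
    # instead of the default of 1 (read below ends up with frequency=2).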
    def test_parse_frequency(self):
        """Allow including frequency files."""
        self._write_file(
            'test.frequency', """
            read: 2
            write: 3
        """)
        path = self._write_file(
            'test.policy', """
            @frequency ./test.frequency
            read: allow
        """)

        self.assertEqual(
            self.parser.parse_file(path),
            parser.ParsedPolicy(
                default_action=bpf.KillProcess(),
                filter_statements=[
                    parser.FilterStatement(
                        syscall=parser.Syscall('read', 0),
                        frequency=2,
                        filters=[
                            parser.Filter(None, bpf.Allow()),
                        ]),
                ]))

    def test_parse_invalid_frequency(self):
        """Reject including invalid frequency files."""
        path = self._write_file('test.policy',
                                """@frequency ./test.frequency""")

        with self.assertRaisesRegex(parser.ParseException, r'missing colon'):
            self._write_file('test.frequency', """
                read
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException, r'invalid colon'):
            self._write_file('test.frequency', """
                read foo
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException, r'missing number'):
            self._write_file('test.frequency', """
                read:
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException, r'invalid number'):
            self._write_file('test.frequency', """
                read: foo
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException, r'invalid number'):
            self._write_file('test.frequency', """
                read: -1
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'empty frequency path'):
            path = self._write_file(
                'test.policy', """
                @frequency
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'invalid frequency path'):
            path = self._write_file(
                'test.policy', """
                @frequency arg0
            """)
            self.parser.parse_file(path)

        with self.assertRaisesRegex(parser.ParseException,
                                    r'Could not open frequency file.*'):
            path = self._write_file(
                'test.policy', """
                @frequency ./nonexistent.frequency
            """)
            self.parser.parse_file(path)

    def test_parse_multiple_unconditional(self):
        """Reject actions after an unconditional action."""
        path = self._write_file(
            'test.policy', """
            read: allow
            read: allow
        """)

        with self.assertRaisesRegex(
                parser.ParseException,
                (r'test.policy\(3:17\): '
                 r'Syscall read.*already had an unconditional action '
                 r'applied')):
            self.parser.parse_file(path)

        path = self._write_file(
            'test.policy', """
            read: log
            read: arg0 == 0; log
        """)

        with self.assertRaisesRegex(
                parser.ParseException,
                (r'test.policy\(3:17\): '
                 r'Syscall read.*already had an unconditional action '
                 r'applied')):
            self.parser.parse_file(path)


if __name__ == '__main__':
    unittest.main()