author     Martin <martin@leo.fri1.uni-lj.si>    2015-09-18 14:03:19 +0200
committer  Martin <martin@leo.fri1.uni-lj.si>    2015-09-18 14:03:19 +0200
commit     fe545bd1c782f5228323d360181d7aeccfce0324 (patch)
tree       0c017fb231d568e3e26b2418d700daf060b1d915 /python
parent     e719ad4812fd4aaf05712022992966b805f5bd31 (diff)
parent     1720db308bf4481d6be45d4f7f611bab576b1184 (diff)

Merge branch 'master' of ssh://212.235.189.51:22122/codeq-server
Diffstat (limited to 'python')
-rwxr-xr-x  python/interpreter.py          40
-rwxr-xr-x  python/runner/interpreter.py    8
-rwxr-xr-x  python/runner/main.py         117
3 files changed, 40 insertions, 125 deletions
diff --git a/python/interpreter.py b/python/interpreter.py
new file mode 100755
index 0000000..87de3aa
--- /dev/null
+++ b/python/interpreter.py
@@ -0,0 +1,40 @@
+#!/usr/bin/python3
+
+import code
+import sys
+
+import seccomp
+
+f = seccomp.SyscallFilter(defaction=seccomp.KILL)
+# Necessary for Python.
+f.add_rule(seccomp.ALLOW, "exit_group")
+f.add_rule(seccomp.ALLOW, "rt_sigaction")
+f.add_rule(seccomp.ALLOW, "brk")
+
+# Mostly harmless.
+f.add_rule(seccomp.ALLOW, "mprotect")
+
+# Allow reading from stdin and writing to stdout/stderr.
+f.add_rule(seccomp.ALLOW, "read", seccomp.Arg(0, seccomp.EQ, sys.stdin.fileno()))
+f.add_rule(seccomp.ALLOW, "write", seccomp.Arg(0, seccomp.EQ, sys.stdout.fileno()))
+f.add_rule(seccomp.ALLOW, "write", seccomp.Arg(0, seccomp.EQ, sys.stderr.fileno()))
+
+f.add_rule(seccomp.ALLOW, "ioctl")
+f.add_rule(seccomp.ALLOW, "mmap")
+f.add_rule(seccomp.ALLOW, "munmap")
+
+# Needed for finding source code for exceptions.
+f.add_rule(seccomp.ALLOW, "stat")
+f.add_rule(seccomp.ALLOW, "open", seccomp.Arg(1, seccomp.MASKED_EQ, 0x3, 0))
+f.add_rule(seccomp.ALLOW, "fcntl")
+f.add_rule(seccomp.ALLOW, "fstat")
+f.add_rule(seccomp.ALLOW, "lseek")
+f.add_rule(seccomp.ALLOW, "read")
+f.add_rule(seccomp.ALLOW, "close")
+
+# Needed for code.InteractiveConsole.
+f.add_rule(seccomp.ALLOW, "access")
+f.add_rule(seccomp.ALLOW, "select")
+f.load()
+
+code.interact(banner='')
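
The new interpreter.py installs a seccomp whitelist (any other syscall kills the process) and then drops into code.interact(). It is meant to be started as a subprocess with its stdout switched to non-blocking mode, the way the removed runner/main.py below did it. A minimal sketch of that usage, assuming interpreter.py is in the current directory:

import os
import subprocess
from fcntl import fcntl, F_GETFL, F_SETFL

# Start the sandboxed interpreter (the path is an assumption for illustration).
p = subprocess.Popen(['./interpreter.py'],
                     stdin=subprocess.PIPE,
                     stdout=subprocess.PIPE,
                     stderr=subprocess.STDOUT)

# Make stdout non-blocking so pending output can be polled without hanging.
flags = fcntl(p.stdout, F_GETFL)
fcntl(p.stdout, F_SETFL, flags | os.O_NONBLOCK)

# Push a line of code; its output shows up on the non-blocking pipe later.
p.stdin.write(b'print(1 + 1)\n')
p.stdin.flush()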
diff --git a/python/runner/interpreter.py b/python/runner/interpreter.py
deleted file mode 100755
index 5fa320a..0000000
--- a/python/runner/interpreter.py
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/python3
-
-# Apparently there is no (working) way to get a non-blocking stdout if we call
-# the Python interpreter directly with subprocess.Popen. For some reason, this
-# works.
-
-import code
-code.interact(banner='')
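
The removed wrapper existed only because setting O_NONBLOCK on the stdout pipe of a directly spawned python3 did not behave as expected, while it did through this small script. The effect of the flag is what pull() in the runner below relies on; a tiny sketch of that behavior, assuming p is a Popen object with a piped, non-blocking stdout:

# With O_NONBLOCK set on p.stdout, read() returns whatever bytes are
# currently buffered, or None when nothing is available, instead of blocking.
chunk = p.stdout.read()
if chunk:
    print(chunk.decode('utf-8'))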
diff --git a/python/runner/main.py b/python/runner/main.py
deleted file mode 100755
index 4e1af53..0000000
--- a/python/runner/main.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/python3
-
-from fcntl import fcntl, F_GETFL, F_SETFL
-import io
-import multiprocessing
-import multiprocessing.managers
-import os
-import subprocess
-import sys
-import threading
-import time
-import uuid
-
-interpreters = {}
-module_access_lock = threading.Lock()
-
-# Execute [code] and evaluate [expr]. Input is given by the string [stdin].
-# Return result of evaluation, the contents of stdout and stderr, and the
-# exception traceback.
-def _run_exec(conn, code, expr=None, stdin=''):
- result, out, err, exc = None, None, None, None
- sys.stdin = io.StringIO(stdin)
- sys.stdout = io.StringIO()
- sys.stderr = io.StringIO()
- try:
- env = {}
- if code:
- exec(code, env)
- if expr:
- result = eval(expr, env)
- except Exception as ex:
- # Exception is not JSON serializable, so return traceback as string.
- import traceback
- exc = traceback.format_exc()
- finally:
- out = sys.stdout.getvalue()
- err = sys.stderr.getvalue()
- sys.stdin.close()
- sys.stdout.close()
- sys.stderr.close()
- conn.send((result, out, err, exc))
-
-class Python(object):
- # Call run_exec in a separate process for each input and return a list of
- # results from those calls. If a call times out, 'timed out' is returned in
- # place of exception traceback.
- def run(self, code, inputs, timeout):
- # Launch processes.
- futures = []
- for expr, stdin in inputs:
- conn_parent, conn_child = multiprocessing.Pipe()
- p = multiprocessing.Process(target=_run_exec, args=(conn_child, code, expr, stdin))
- p.start()
- futures.append((p, conn_parent))
-
- # Wait for results.
- results = []
- start = time.monotonic()
- for p, conn in futures:
- now = time.monotonic()
- real_timeout = max(0, timeout - (now - start))
- if conn.poll(real_timeout):
- results.append(conn.recv())
- else:
- results.append('timed out')
- p.terminate()
- return results
-
- # Start and return a new Python interpreter process.
- def create(self):
- directory = os.path.dirname(os.path.realpath(__file__))
- script = os.path.join(directory, 'interpreter.py')
- p = subprocess.Popen([script],
- stdin=subprocess.PIPE,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- # Set the non-blocking flag for stdout.
- flags = fcntl(p.stdout, F_GETFL)
- fcntl(p.stdout, F_SETFL, flags | os.O_NONBLOCK)
-
- interpreter_id = uuid.uuid4().hex
- with module_access_lock:
- interpreters[interpreter_id] = p
- return interpreter_id
-
- # Read any available bytes from the interpreter's stdout.
- def pull(self, interpreter_id):
- with module_access_lock:
- interpreter = interpreters[interpreter_id]
- stdout = interpreter.stdout.read()
- if stdout:
- stdout = stdout.decode('utf-8')
- return stdout
-
- # Push a string to the interpreter's stdin.
- def push(self, interpreter_id, stdin):
- with module_access_lock:
- interpreter = interpreters[interpreter_id]
- interpreter.stdin.write(stdin.encode('utf-8'))
- interpreter.stdin.flush()
-
- # Kill an interpreter process.
- def destroy(self, interpreter_id):
- with module_access_lock:
- interpreter = interpreters[interpreter_id]
- del interpreters[interpreter_id]
- interpreter.kill()
-
-class PythonManager(multiprocessing.managers.BaseManager):
- pass
-
-PythonManager.register('Python', callable=Python)
-
-if __name__ == '__main__':
- print('Python engine started.')
- m = PythonManager(address=('localhost', 3031), authkey=b'c0d3q3y-python')
- m.get_server().serve_forever()
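
The removed main.py exposed the Python class through a multiprocessing manager listening on localhost:3031. A client talking to it would have looked roughly like this (a sketch, assuming the server above is running; the address, authkey and registered name are taken from its __main__ block):

from multiprocessing.managers import BaseManager

class PythonManager(BaseManager):
    pass

# Register the same name the server exposes; no callable is needed client-side.
PythonManager.register('Python')

m = PythonManager(address=('localhost', 3031), authkey=b'c0d3q3y-python')
m.connect()

python = m.Python()
# Run a snippet with one (expr, stdin) input and a one-second timeout; each
# result is a (value, stdout, stderr, traceback) tuple, or 'timed out'.
print(python.run('x = 2 + 2', [('x', '')], 1.0))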