Feat: add code_executor_manager (#7814)

### What problem does this PR solve?

Add code_executor_manager. #4977.

### Type of change

- [x] New Feature (non-breaking change which adds functionality)
This commit is contained in:
Yongteng Lei
2025-05-23 16:33:38 +08:00
committed by GitHub
parent db4371c745
commit 2d7c1368f0
39 changed files with 3240 additions and 0 deletions

View File

@ -0,0 +1,15 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

View File

@ -0,0 +1,245 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import asyncio
import base64
import json
import os
import time
import uuid
from core.config import TIMEOUT
from core.container import allocate_container_blocking, release_container
from core.logger import logger
from models.enums import ResourceLimitType, ResultStatus, RuntimeErrorType, SupportLanguage, UnauthorizedAccessType
from models.schemas import CodeExecutionRequest, CodeExecutionResult
from utils.common import async_run_command
async def execute_code(req: CodeExecutionRequest) -> CodeExecutionResult:
    """Fully asynchronous execution logic.

    Allocates a pooled container for the requested language, stages the
    decoded user code plus a small runner script in a private host workdir,
    streams both into the container over a tar pipe, executes the runner
    under a hard time limit, and maps the exit status to a
    CodeExecutionResult. Host and container workspaces are always cleaned
    up and the container is returned to the pool.

    :param req: language, base64-encoded code, and optional keyword
        arguments forwarded to the user's ``main``.
    :return: structured execution outcome (errors are reported in the
        result, not raised to the caller).
    """
    language = req.language
    container = await allocate_container_blocking(language)
    if not container:
        # Pool exhausted: surface as a runner-side error instead of queueing.
        return CodeExecutionResult(
            status=ResultStatus.PROGRAM_RUNNER_ERROR,
            stdout="",
            stderr="Container pool is busy",
            exit_code=-10,
            detail="no_available_container",
        )

    task_id = str(uuid.uuid4())
    workdir = f"/tmp/sandbox_{task_id}"
    # 0o700 keeps the staged user code private to this service's host user.
    os.makedirs(workdir, mode=0o700, exist_ok=True)

    try:
        if language == SupportLanguage.PYTHON:
            code_name = "main.py"
            # code: decode the submitted source onto the host.
            code_path = os.path.join(workdir, code_name)
            with open(code_path, "wb") as f:
                f.write(base64.b64decode(req.code_b64))
            # runner: imports main() from the user module and calls it with
            # the JSON-decoded arguments passed in argv[1].
            runner_name = "runner.py"
            runner_path = os.path.join(workdir, runner_name)
            with open(runner_path, "w") as f:
                f.write("""import json
import os
import sys
sys.path.insert(0, os.path.dirname(__file__))
from main import main
if __name__ == "__main__":
    args = json.loads(sys.argv[1])
    result = main(**args)
    if result is not None:
        print(result)
""")
        elif language == SupportLanguage.NODEJS:
            code_name = "main.js"
            code_path = os.path.join(workdir, code_name)
            with open(code_path, "wb") as f:
                f.write(base64.b64decode(req.code_b64))
            # runner: requires main() from the user module; arguments arrive
            # JSON-encoded in process.argv[2]. main() is awaited as a Promise.
            runner_name = "runner.js"
            runner_path = os.path.join(workdir, runner_name)
            with open(runner_path, "w") as f:
                f.write("""
const fs = require('fs');
const path = require('path');
const args = JSON.parse(process.argv[2]);
const mainPath = path.join(__dirname, 'main.js');
if (fs.existsSync(mainPath)) {
    const { main } = require(mainPath);
    if (typeof args === 'object' && args !== null) {
        main(args).then(result => {
            if (result !== null) {
                console.log(result);
            }
        }).catch(err => {
            console.error('Error in main function:', err);
        });
    } else {
        console.error('Error: args is not a valid object:', args);
    }
} else {
    console.error('main.js not found in the current directory');
}
""")
        else:
            # Request validation should make this unreachable; raise instead
            # of falling through to a NameError on `code_name`. (A previous
            # version used `assert True, ...`, which can never fire and is
            # stripped under `python -O` anyway.)
            raise RuntimeError(f"Unsupported language: {language}")

        # dirs: create the per-task workspace inside the container.
        returncode, _, stderr = await async_run_command("docker", "exec", container, "mkdir", "-p", f"/workspace/{task_id}", timeout=5)
        if returncode != 0:
            raise RuntimeError(f"Directory creation failed: {stderr}")

        # archive the staged files on the host ...
        tar_proc = await asyncio.create_subprocess_exec("tar", "czf", "-", "-C", workdir, code_name, runner_name, stdout=asyncio.subprocess.PIPE)
        tar_stdout, _ = await tar_proc.communicate()

        # ... and unarchive the stream inside the container (no bind mounts).
        docker_proc = await asyncio.create_subprocess_exec(
            "docker", "exec", "-i", container, "tar", "xzf", "-", "-C", f"/workspace/{task_id}", stdin=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
        )
        stdout, stderr = await docker_proc.communicate(input=tar_stdout)
        if docker_proc.returncode != 0:
            raise RuntimeError(stderr.decode())

        # exec
        start_time = time.time()
        try:
            logger.info(f"Passed in args: {req.arguments}")
            args_json = json.dumps(req.arguments or {})
            run_args = [
                "docker",
                "exec",
                "--workdir",
                f"/workspace/{task_id}",
                container,
                "timeout",
                str(TIMEOUT),
                language,
            ]
            # interpreter flags
            if language == SupportLanguage.PYTHON:
                # -I: isolated mode (ignores env vars / user site); -B: no .pyc files.
                run_args.extend(["-I", "-B"])
            # NODEJS needs no extra flags; other languages were rejected above.
            run_args.extend([runner_name, args_json])

            # Outer timeout is slightly above the in-container `timeout` so the
            # container-side limit (exit 124) is the one that normally fires.
            returncode, stdout, stderr = await async_run_command(
                *run_args,
                timeout=TIMEOUT + 5,
            )
            time_used_ms = (time.time() - start_time) * 1000

            logger.info("----------------------------------------------")
            logger.info(f"Code: {str(base64.b64decode(req.code_b64))}")
            logger.info(f"{returncode=}")
            logger.info(f"{stdout=}")
            logger.info(f"{stderr=}")
            logger.info(f"{args_json=}")

            if returncode == 0:
                return CodeExecutionResult(
                    status=ResultStatus.SUCCESS,
                    stdout=str(stdout),
                    stderr=stderr,
                    exit_code=0,
                    time_used_ms=time_used_ms,
                )
            elif returncode == 124:
                # coreutils `timeout` exits 124 when the command hit the limit.
                return CodeExecutionResult(
                    status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                    stdout="",
                    stderr="Execution timeout",
                    exit_code=-124,
                    resource_limit_type=ResourceLimitType.TIME,
                    time_used_ms=time_used_ms,
                )
            elif returncode == 137:
                # 128 + SIGKILL(9): typically the container OOM killer.
                return CodeExecutionResult(
                    status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                    stdout="",
                    stderr="Memory limit exceeded (killed by OOM)",
                    exit_code=-137,
                    resource_limit_type=ResourceLimitType.MEMORY,
                    time_used_ms=time_used_ms,
                )
            return analyze_error_result(stderr, returncode)
        except asyncio.TimeoutError:
            # The outer guard fired: kill the interpreter inside the container.
            await async_run_command("docker", "exec", container, "pkill", "-9", language)
            return CodeExecutionResult(
                status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
                stdout="",
                stderr="Execution timeout",
                exit_code=-1,
                resource_limit_type=ResourceLimitType.TIME,
                time_used_ms=(time.time() - start_time) * 1000,
            )
    except Exception as e:
        logger.error(f"Execution exception: {str(e)}")
        return CodeExecutionResult(status=ResultStatus.PROGRAM_RUNNER_ERROR, stdout="", stderr=str(e), exit_code=-3, detail="internal_error")
    finally:
        # cleanup both sides concurrently; cleanup failures must never mask the result.
        cleanup_tasks = [async_run_command("docker", "exec", container, "rm", "-rf", f"/workspace/{task_id}"), async_run_command("rm", "-rf", workdir)]
        await asyncio.gather(*cleanup_tasks, return_exceptions=True)
        await release_container(container, language)
def analyze_error_result(stderr: str, exit_code: int) -> CodeExecutionResult:
    """Classify a failed execution from well-known stderr markers.

    Recognized markers map to sandbox-violation or resource-limit categories;
    anything else is reported as a plain non-zero program exit.

    :param stderr: captured standard error of the failed run.
    :param exit_code: the process exit code to echo back in the result.
    :return: a CodeExecutionResult with the matching status classification.
    """
    # Fields shared by every classification below.
    common = {"stdout": "", "stderr": stderr, "exit_code": exit_code}

    if "Permission denied" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.UNAUTHORIZED_ACCESS,
            unauthorized_access_type=UnauthorizedAccessType.FILE_ACCESS,
            **common,
        )
    if "Operation not permitted" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.UNAUTHORIZED_ACCESS,
            unauthorized_access_type=UnauthorizedAccessType.DISALLOWED_SYSCALL,
            **common,
        )
    if "MemoryError" in stderr:
        return CodeExecutionResult(
            status=ResultStatus.RESOURCE_LIMIT_EXCEEDED,
            resource_limit_type=ResourceLimitType.MEMORY,
            **common,
        )
    return CodeExecutionResult(
        status=ResultStatus.PROGRAM_ERROR,
        runtime_error_type=RuntimeErrorType.NONZERO_EXIT,
        **common,
    )

View File

@ -0,0 +1,38 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from fastapi import Request
from fastapi.responses import JSONResponse
from models.enums import ResultStatus
from models.schemas import CodeExecutionResult
from slowapi import Limiter
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address
# Shared rate limiter, keyed by the caller's remote address.
limiter = Limiter(key_func=get_remote_address)
async def rate_limit_exceeded_handler(request: Request, exc: Exception) -> JSONResponse:
    """Translate slowapi's RateLimitExceeded into a CodeExecutionResult JSON body.

    Any other exception type is re-raised untouched so FastAPI's normal
    error handling applies.
    """
    if not isinstance(exc, RateLimitExceeded):
        # Not a rate-limit error: let it propagate.
        raise exc

    message = "Too many requests, please try again later"
    payload = CodeExecutionResult(
        status=ResultStatus.PROGRAM_RUNNER_ERROR,
        stdout="",
        stderr=message,
        exit_code=-429,
        detail=message,
    ).model_dump()
    return JSONResponse(content=payload)

View File

@ -0,0 +1,173 @@
#
# Copyright 2025 The InfiniFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import ast
from typing import List, Tuple
from core.logger import logger
from models.enums import SupportLanguage
class SecurePythonAnalyzer(ast.NodeVisitor):
    """
    An AST-based analyzer for detecting unsafe Python code patterns.

    Walk a parsed tree with ``visit``; findings accumulate in
    ``unsafe_items`` as (description, line number) tuples. An empty list
    after the walk means no known-dangerous pattern was detected.
    """

    # Top-level module names whose import or attribute access is flagged.
    DANGEROUS_IMPORTS = {"os", "subprocess", "sys", "shutil", "socket", "ctypes", "pickle", "threading", "multiprocessing", "asyncio", "http.client", "ftplib", "telnetlib"}

    # Names that are flagged wherever they are called, defined, or assigned.
    DANGEROUS_CALLS = {
        "eval",
        "exec",
        "open",
        "__import__",
        "compile",
        "input",
        "system",
        "popen",
        "remove",
        "rename",
        "rmdir",
        "chdir",
        "chmod",
        "chown",
        "getattr",
        "setattr",
        "globals",
        "locals",
        "shutil.rmtree",
        "subprocess.call",
        "subprocess.Popen",
        "ctypes",
        "pickle.load",
        "pickle.loads",
        "pickle.dump",
        "pickle.dumps",
    }

    def __init__(self):
        # Collected findings: (human-readable description, 1-based line number).
        self.unsafe_items: List[Tuple[str, int]] = []

    def visit_Import(self, node: ast.Import):
        """Check for dangerous imports."""
        for alias in node.names:
            # Compare on the top-level package so `import os.path` is caught too.
            if alias.name.split(".")[0] in self.DANGEROUS_IMPORTS:
                self.unsafe_items.append((f"Import: {alias.name}", node.lineno))
        self.generic_visit(node)

    def visit_ImportFrom(self, node: ast.ImportFrom):
        """Check for dangerous imports from specific modules."""
        if node.module and node.module.split(".")[0] in self.DANGEROUS_IMPORTS:
            self.unsafe_items.append((f"From Import: {node.module}", node.lineno))
        self.generic_visit(node)

    def visit_Call(self, node: ast.Call):
        """Check for dangerous function calls."""
        if isinstance(node.func, ast.Name) and node.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append((f"Call: {node.func.id}", node.lineno))
        self.generic_visit(node)

    def visit_Attribute(self, node: ast.Attribute):
        """Check for dangerous attribute access (e.g. `os.system`)."""
        if isinstance(node.value, ast.Name) and node.value.id in self.DANGEROUS_IMPORTS:
            self.unsafe_items.append((f"Attribute Access: {node.value.id}.{node.attr}", node.lineno))
        self.generic_visit(node)

    def visit_BinOp(self, node: ast.BinOp):
        """Check for possible unsafe operations like concatenating strings with commands."""
        # This could be useful to detect `eval("os." + "system")`.
        # NOTE: flags ANY constant-constant BinOp, including numeric ones like
        # `1 + 2` — intentionally conservative.
        if isinstance(node.left, ast.Constant) and isinstance(node.right, ast.Constant):
            self.unsafe_items.append(("Possible unsafe string concatenation", node.lineno))
        self.generic_visit(node)

    def visit_FunctionDef(self, node: ast.FunctionDef):
        """Check for dangerous function definitions (e.g., user-defined eval)."""
        if node.name in self.DANGEROUS_CALLS:
            self.unsafe_items.append((f"Function Definition: {node.name}", node.lineno))
        self.generic_visit(node)

    def visit_Assign(self, node: ast.Assign):
        """Check for assignments to variables that might lead to dangerous operations."""
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id in self.DANGEROUS_CALLS:
                self.unsafe_items.append((f"Assignment to dangerous variable: {target.id}", node.lineno))
        self.generic_visit(node)

    def visit_Lambda(self, node: ast.Lambda):
        """Check for lambda functions with dangerous operations."""
        if isinstance(node.body, ast.Call) and isinstance(node.body.func, ast.Name) and node.body.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Lambda with dangerous function call", node.lineno))
        self.generic_visit(node)

    def visit_ListComp(self, node: ast.ListComp):
        """Check for list comprehensions with dangerous operations."""
        if isinstance(node.elt, ast.Call) and isinstance(node.elt.func, ast.Name) and node.elt.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("List comprehension with dangerous function call", node.lineno))
        # generic_visit descends into both the element and the generators;
        # pre-visiting the generators separately (as an earlier version did)
        # reported the same dangerous call twice.
        self.generic_visit(node)

    def visit_DictComp(self, node: ast.DictComp):
        """Check for dictionary comprehensions with dangerous operations."""
        # Check for dangerous calls in both the key and value expressions.
        if isinstance(node.key, ast.Call) and isinstance(node.key.func, ast.Name) and node.key.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Dict comprehension with dangerous function call in key", node.lineno))
        if isinstance(node.value, ast.Call) and isinstance(node.value.func, ast.Name) and node.value.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Dict comprehension with dangerous function call in value", node.lineno))
        # Visit other sub-nodes (e.g., the generators in the comprehension).
        self.generic_visit(node)

    def visit_SetComp(self, node: ast.SetComp):
        """Check for set comprehensions with dangerous operations."""
        if isinstance(node.elt, ast.Call) and isinstance(node.elt.func, ast.Name) and node.elt.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Set comprehension with dangerous function call", node.lineno))
        # As in visit_ListComp, rely on generic_visit alone to avoid
        # double-visiting the generators and duplicating findings.
        self.generic_visit(node)

    def visit_Yield(self, node: ast.Yield):
        """Check for yield statements that could be used to produce unsafe values."""
        if isinstance(node.value, ast.Call) and isinstance(node.value.func, ast.Name) and node.value.func.id in self.DANGEROUS_CALLS:
            self.unsafe_items.append(("Yield with dangerous function call", node.lineno))
        self.generic_visit(node)
def analyze_code_security(code: str, language: SupportLanguage) -> Tuple[bool, List[Tuple[str, int]]]:
    """
    Analyze the provided code string and return whether it's safe and why.

    :param code: The source code to analyze.
    :param language: The programming language of the code.
    :return: (is_safe: bool, issues: List of (description, line number))
    """
    if language != SupportLanguage.PYTHON:
        # Only Python has an AST pass today; other languages pass by default.
        logger.warning(f"[SafeCheck] Unsupported language for security analysis: {language} — defaulting to SAFE (manual review recommended)")
        return True, [(f"Unsupported language for security analysis: {language} — defaulted to SAFE, manual review recommended", -1)]

    try:
        analyzer = SecurePythonAnalyzer()
        analyzer.visit(ast.parse(code))
    except Exception as e:
        # Unparseable code is treated as unsafe rather than silently passed.
        logger.error(f"[SafeCheck] Python parsing failed: {str(e)}")
        return False, [(f"Parsing Error: {str(e)}", -1)]

    issues = analyzer.unsafe_items
    return not issues, issues