2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,4 +1,4 @@
-FROM python:alpine
+FROM python:3.13-alpine

WORKDIR /app

37 changes: 21 additions & 16 deletions dispatcher/dispatcher.py
@@ -5,8 +5,8 @@
import requests
import pathlib
import queue
-import textwrap
import shutil
+import tempfile
from datetime import datetime

from runner.submission import SubmissionRunner
@@ -293,11 +293,6 @@ def create_container(
finally:
self.dec_container()
logger().info(f'finish task {submission_id}/{case_no}')
-# truncate long stdout/stderr
-_res = res.copy()
-for k in ('Stdout', 'Stderr'):
-_res[k] = textwrap.shorten(_res.get(k, ''), 37, placeholder='...')
-logger().debug(f'runner result: {_res}')
with self.locks[submission_id]:
self.on_case_complete(
submission_id=submission_id,
@@ -378,16 +373,26 @@ def on_submission_complete(self, submission_id: str):
assert [*submission_result.keys()] == [*range(len(submission_result))]
submission_result = [*submission_result.values()]
# post data
-submission_data = {
-'tasks': submission_result,
-'token': config.SANDBOX_TOKEN
-}
-self.release(submission_id)
-logger().info(f'send to BE [submission_id={submission_id}]')
-resp = requests.put(
-f'{config.BACKEND_API}/submission/{submission_id}/complete',
-json=submission_data,
-)
+with tempfile.NamedTemporaryFile("w") as tmpf:
+submission_data = {
+'tasks': submission_result,
+'token': config.SANDBOX_TOKEN
+}
+# write payload to file
+json.dump(submission_data, tmpf)
+tmpf.flush()
+# release resources
+del submission_data
+self.release(submission_id)
+
+logger().info(f'send to BE [submission_id={submission_id}]')
+# open in binary mode as requests needs a binary stream
+with open(tmpf.name, "rb") as payload:
+resp = requests.put(
+f'{config.BACKEND_API}/submission/{submission_id}/complete',
+data=payload,
+headers={'Content-Type': 'application/json'},
+)
logger().debug(f'get BE response: [{resp.status_code}] {resp.text}', )
# clear
if resp.ok:
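Note: the rewritten on_submission_complete above spools the result payload to a temporary file and streams that file to the backend, instead of holding the whole JSON body in memory while the submission's resources are released. A minimal standalone sketch of the same requests pattern, with a placeholder URL and payload rather than the project's real config:

import json
import tempfile

import requests


def put_json_from_tempfile(url, payload):
    # Spool the payload to disk so the in-memory dict can be dropped before the upload.
    with tempfile.NamedTemporaryFile('w', suffix='.json') as tmpf:
        json.dump(payload, tmpf)
        tmpf.flush()
        del payload  # drop the local reference; the caller may still hold one
        # requests streams any file-like object passed via `data=`;
        # reopen in binary mode and set the JSON content type explicitly.
        with open(tmpf.name, 'rb') as body:
            return requests.put(
                url,
                data=body,
                headers={'Content-Type': 'application/json'},
            )

# hypothetical usage:
# resp = put_json_from_tempfile(
#     'https://backend.example/submission/xyz/complete',
#     {'tasks': [], 'token': '...'},
# )

The file-backed body means the task list no longer has to stay resident between release() and the PUT request.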
47 changes: 21 additions & 26 deletions runner/sandbox.py
@@ -3,8 +3,8 @@
import tarfile
import tempfile
from dataclasses import dataclass
-from io import BytesIO
from typing import Optional
+from pathlib import Path
import docker


@@ -115,21 +115,11 @@ def run(self):
raise JudgeError
# retrieve result
try:
-result = self.get(
-container=container,
-path='/result/',
-filename='result',
-).split('\n')
-stdout = self.get(
-container=container,
-path='/result/',
-filename='stdout',
-)
-stderr = self.get(
-container=container,
-path='/result/',
-filename='stderr',
+result, stdout, stderr = self.get_result(
+container,
+['result', 'stdout', 'stderr'],
)
+result = result.split('\n')
except Exception as e:
self.client.remove_container(container, v=True, force=True)
logging.error(e)
@@ -147,16 +137,21 @@ def run(self):
DockerExitCode=exit_status['StatusCode'],
)

-def get(self, container, path, filename):
-bits, _ = self.client.get_archive(container, f'{path}{filename}')
-tarbits = b''.join(bits)
-tar = tarfile.open(fileobj=BytesIO(tarbits))
-with tempfile.TemporaryDirectory() as extract_path:
-tar.extract(filename, extract_path)
-with open(
-f'{extract_path}/{filename}',
+def get_result(self, container, filenames: list[str]) -> list[str]:
+result_dir = '/result'
+bits, _ = self.client.get_archive(container, result_dir)
+with (tempfile.NamedTemporaryFile() as
+tarball, tempfile.TemporaryDirectory() as extract_path):
+for chunk in bits:
+tarball.write(chunk)
+tarball.flush()
+tarball.seek(0)
+with tarfile.open(fileobj=tarball) as tar:
+tar.extractall(extract_path)
+return [
+open(
+Path(extract_path) / result_dir.lstrip('/') / filename,
'r',
errors='ignore',
-) as f:
-contents = f.read()
-return contents
+).read() for filename in filenames
+]
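The new get_result above replaces three separate get_archive round-trips (one per file) with a single archive of the whole /result directory, spooled to a temporary tarball and extracted once. A rough standalone sketch of that pattern against the docker-py low-level API; the client, container, and file names here are placeholders:

import tarfile
import tempfile
from pathlib import Path

import docker


def read_container_files(client, container, path, filenames):
    # get_archive returns the directory as a tar stream plus a stat dict
    bits, _ = client.get_archive(container, path)
    with tempfile.NamedTemporaryFile() as tarball, \
            tempfile.TemporaryDirectory() as extract_dir:
        for chunk in bits:  # the stream arrives in chunks
            tarball.write(chunk)
        tarball.flush()
        tarball.seek(0)
        with tarfile.open(fileobj=tarball) as tar:
            tar.extractall(extract_dir)
        # the archive is rooted at the directory's own name, e.g. 'result/'
        root = Path(extract_dir) / Path(path).name
        return [(root / name).read_text(errors='ignore') for name in filenames]

# hypothetical usage:
# client = docker.APIClient()
# result, stdout, stderr = read_container_files(
#     client, container_id, '/result', ['result', 'stdout', 'stderr'])

Fetching the directory once keeps a single round-trip to the Docker daemon and a single extraction, at the cost of copying any extra files that happen to live under /result.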