Merged
19 commits
d1a4fa6
feat: implement filtered copy from container with size and directory …
Matios102 Dec 29, 2025
bc5d10f
refactor: remove unused functions and clean up code in utils package
Matios102 Dec 29, 2025
9a7e986
fix: use background context for container cleanup to avoid potential …
Matios102 Dec 29, 2025
ed0112c
refactor: remove redundant checks in ExtractTarArchiveFiltered for im…
Matios102 Dec 29, 2025
6c476e3
refactor: simplify directory allowance check in tar extraction process
Matios102 Dec 29, 2025
7056d00
refactor: standardize runner user configuration in Dockerfiles and co…
Matios102 Dec 29, 2025
d4445c3
refactor: enhance host configuration by incorporating memory limits f…
Matios102 Dec 29, 2025
5c5e1b3
refactor: replace file count validation with path depth check in tar …
Matios102 Dec 29, 2025
e5db573
refactor: reduce PidsLimit in buildHostConfig from 64 to 4 for improv…
Matios102 Dec 29, 2025
c47f892
refactor: increase PidsLimit in buildHostConfig from 4 to 8 for enhan…
Matios102 Dec 29, 2025
9c21d99
Update internal/stages/executor/cpp/Dockerfile
Matios102 Dec 29, 2025
df7d712
Update internal/stages/executor/python/Dockerfile
Matios102 Dec 30, 2025
247f60d
handle compilation inside the container, remove compiler class from t…
Matios102 Dec 30, 2025
b9197a6
whitespace
Matios102 Dec 30, 2025
d768e25
fix: adjust compilation error handling to exit with 0 on compilation …
Matios102 Dec 30, 2025
3ee2f5e
fix: compilation error detection
Matios102 Dec 30, 2025
af5a42b
tests
Matios102 Dec 30, 2025
d3e3371
fix: improve compilation error handling and update buildCompileComman…
Matios102 Dec 30, 2025
dddf447
Merge branch 'master' into task/containerized-compilation
Matios102 Jan 12, 2026
3 changes: 1 addition & 2 deletions Dockerfile
@@ -1,7 +1,7 @@
 FROM golang:1.23-bookworm AS builder

 RUN apt-get update && \
-    apt-get install -y sudo debootstrap schroot g++ && \
+    apt-get install -y sudo debootstrap schroot && \
     rm -rf /var/lib/apt/lists/*

 WORKDIR /app
@@ -17,7 +17,6 @@ FROM debian:bookworm-slim

 RUN apt-get update && \
     apt-get install -y \
-    g++ \
     docker.io \
     && rm -rf /var/lib/apt/lists/*

4 changes: 1 addition & 3 deletions cmd/main.go
@@ -8,7 +8,6 @@ import (
 	"github.com/mini-maxit/worker/internal/rabbitmq/consumer"
 	"github.com/mini-maxit/worker/internal/rabbitmq/responder"
 	"github.com/mini-maxit/worker/internal/scheduler"
-	"github.com/mini-maxit/worker/internal/stages/compiler"
 	"github.com/mini-maxit/worker/internal/stages/executor"
 	"github.com/mini-maxit/worker/internal/stages/packager"
 	"github.com/mini-maxit/worker/internal/stages/verifier"
@@ -47,7 +46,6 @@ func main() {
 	if err := fileCache.InitCache(); err != nil {
 		logger.Fatalf("Failed to initialize file cache: %v", err)
 	}
-	compiler := compiler.NewCompiler()
 	packager := packager.NewPackager(storageService, fileCache)
 	executor := executor.NewExecutor(dCli)
 	verifier := verifier.NewVerifier(config.VerifierFlags)
@@ -57,7 +55,7 @@
 			logger.Error("Failed to close responder", err)
 		}
 	}()
-	scheduler := scheduler.NewScheduler(config.MaxWorkers, compiler, packager, executor, verifier, responder)
+	scheduler := scheduler.NewScheduler(config.MaxWorkers, packager, executor, verifier, responder)
 	consumer := consumer.NewConsumer(workerChannel, config.ConsumeQueueName, scheduler, responder)

 	// Start listening for messages
1 change: 0 additions & 1 deletion generate_mocks.sh
@@ -12,7 +12,6 @@ INTERFACES=(
   "internal/rabbitmq/channel Channel"
   "internal/rabbitmq/responder Responder"
   "internal/scheduler Scheduler"
-  "internal/stages/compiler Compiler"
   "internal/stages/executor Executor"
   "internal/stages/packager Packager"
   "internal/stages/verifier Verifier"
4 changes: 3 additions & 1 deletion internal/docker/docker_client.go
@@ -31,6 +31,7 @@ type DockerClient interface {
 		ctx context.Context,
 		containerID, srcPath, dstPath string,
 		allowedDirs []string,
+		alwaysCopyFiles []string,
 		maxFileSize int64,
 		maxFilesInDir int,
 	) error
@@ -151,6 +152,7 @@ func (d *dockerClient) CopyFromContainerFiltered(
 	ctx context.Context,
 	containerID, srcPath, dstPath string,
 	allowedDirs []string,
+	alwaysCopyFiles []string,
 	maxFileSize int64,
 	maxFilesInDir int,
 ) error {
@@ -163,7 +165,7 @@
 	defer reader.Close()

 	// Extract tar archive with filtering to destination
-	err = utils.ExtractTarArchiveFiltered(reader, dstPath, allowedDirs, maxFileSize, maxFilesInDir)
+	err = utils.ExtractTarArchiveFiltered(reader, dstPath, allowedDirs, alwaysCopyFiles, maxFileSize, maxFilesInDir)
 	if err != nil {
 		d.logger.Errorf("Failed to extract filtered tar archive to %s: %s", dstPath, err)
 	}
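Note on the new parameter: alwaysCopyFiles threads a list of paths through CopyFromContainerFiltered into the tar extraction in the utils package, alongside the existing allowedDirs, maxFileSize, and maxFilesInDir filters. utils.ExtractTarArchiveFiltered itself is not shown in this diff, so the Go sketch below is only a guess at how such a filter could combine the checks; shouldExtract and the package name are hypothetical, not identifiers from the repository.

package sandbox

import (
	"archive/tar"
	"path/filepath"
	"strings"
)

// shouldExtract is a hypothetical filter, not the repository's
// utils.ExtractTarArchiveFiltered. It illustrates how an always-copy list
// could bypass the allowed-directory check while the size limit still
// applies to every archive entry.
func shouldExtract(hdr *tar.Header, allowedDirs, alwaysCopyFiles []string, maxFileSize int64) bool {
	if hdr.Size > maxFileSize {
		return false
	}
	name := filepath.Clean(hdr.Name)

	// Entries on the always-copy list (e.g. a compile error log) are kept
	// regardless of which directory they live in.
	for _, f := range alwaysCopyFiles {
		if name == filepath.Clean(f) {
			return true
		}
	}

	// Everything else must sit inside one of the allowed directories.
	for _, dir := range allowedDirs {
		cleanDir := filepath.Clean(dir)
		if name == cleanDir || strings.HasPrefix(name, cleanDir+"/") {
			return true
		}
	}
	return false
}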
53 changes: 20 additions & 33 deletions internal/pipeline/pipeline.go
@@ -1,17 +1,15 @@
 package pipeline

 import (
-	"errors"
 	"fmt"
+	"os"

 	"github.com/mini-maxit/worker/internal/logger"
 	"github.com/mini-maxit/worker/internal/rabbitmq/responder"
-	"github.com/mini-maxit/worker/internal/stages/compiler"
 	"github.com/mini-maxit/worker/internal/stages/executor"
 	"github.com/mini-maxit/worker/internal/stages/packager"
 	"github.com/mini-maxit/worker/internal/stages/verifier"
 	"github.com/mini-maxit/worker/pkg/constants"
-	customErr "github.com/mini-maxit/worker/pkg/errors"
 	"github.com/mini-maxit/worker/pkg/languages"
 	"github.com/mini-maxit/worker/pkg/messages"
 	"github.com/mini-maxit/worker/pkg/solution"
@@ -37,7 +35,6 @@ type worker struct {
 	id            int
 	state         WorkerState
 	responseQueue string
-	compiler      compiler.Compiler
 	packager      packager.Packager
 	executor      executor.Executor
 	verifier      verifier.Verifier
@@ -47,7 +44,6 @@

 func NewWorker(
 	id int,
-	compiler compiler.Compiler,
 	packager packager.Packager,
 	executor executor.Executor,
 	verifier verifier.Verifier,
@@ -58,7 +54,6 @@ func NewWorker(
 	return &worker{
 		id:        id,
 		state:     WorkerState{Status: constants.WorkerStatusIdle, ProcessingMessageID: ""},
-		compiler:  compiler,
 		packager:  packager,
 		executor:  executor,
 		verifier:  verifier,
@@ -136,28 +131,7 @@ func (ws *worker) ProcessTask(messageID, responseQueue string, task *messages.Ta
 		}
 	}()

-	err = ws.compiler.CompileSolutionIfNeeded(
-		langType,
-		task.LanguageVersion,
-		dc.UserSolutionPath,
-		dc.UserExecFilePath,
-		dc.CompileErrFilePath,
-		messageID)
-
-	if err != nil {
-		if errors.Is(err, customErr.ErrCompilationFailed) {
-			ws.publishCompilationError(dc, task.TestCases)
-			return
-		}
-
-		ws.responder.PublishTaskErrorToResponseQueue(
-			constants.QueueMessageTypeTask,
-			ws.state.ProcessingMessageID,
-			ws.responseQueue,
-			err,
-		)
-		return
-	}
+	requiresCompilation := !langType.IsScriptingLanguage()

 	limits := make([]solution.Limit, len(task.TestCases))
 	for i, tc := range task.TestCases {
@@ -168,11 +142,13 @@
 	}

 	cfg := executor.CommandConfig{
-		MessageID:       messageID,
-		DirConfig:       dc,
-		LanguageType:    langType,
-		LanguageVersion: task.LanguageVersion,
-		TestCases:       task.TestCases,
+		MessageID:         messageID,
+		DirConfig:         dc,
+		LanguageType:      langType,
+		LanguageVersion:   task.LanguageVersion,
+		TestCases:         task.TestCases,
+		SourceFilePath:    dc.UserSolutionPath,
+		RequiresCompiling: requiresCompilation,
 	}

 	err = ws.executor.ExecuteCommand(cfg)
@@ -186,6 +162,16 @@
 		return
 	}

+	if requiresCompilation {
+		// Check for compilation error
+		fileInfo, statErr := os.Stat(dc.CompileErrFilePath)
+
+		if statErr == nil && fileInfo.Size() > 0 {
+			ws.publishCompilationError(dc, task.TestCases)
+			return
+		}
+	}
+
 	solutionResult := ws.verifier.EvaluateAllTestCases(dc, task.TestCases, messageID, langType)

 	err = ws.packager.SendSolutionPackage(dc, task.TestCases /*hasCompilationErr*/, false, messageID)
@@ -209,6 +195,7 @@
 }

 func (ws *worker) publishCompilationError(dirConfig *packager.TaskDirConfig, testCases []messages.TestCase) {
+	ws.logger.Infof("Compilation error occurred for message ID: %s", ws.state.ProcessingMessageID)
 	sendErr := ws.packager.SendSolutionPackage(dirConfig, testCases, true, ws.state.ProcessingMessageID)
 	if sendErr != nil {
 		ws.responder.PublishTaskErrorToResponseQueue(
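With the compiler stage removed, ProcessTask now infers a compilation failure from a non-empty CompileErrFilePath after the executor container has run. How the container populates that file is not part of this diff; the sketch below is a hedged guess at the container-side convention suggested by the commits ("handle compilation inside the container", "exit with 0 on compilation …"): compiler diagnostics are redirected into the error file and the wrapper itself reports success. compileInContainer and the g++ invocation are illustrative assumptions, not code from the repository.

package sandbox

import (
	"os"
	"os/exec"
)

// compileInContainer is a hypothetical wrapper run inside the executor
// container. It streams compiler diagnostics into compileErrPath and ignores
// the compiler's exit status, so the worker can distinguish "compilation
// failed" (non-empty error file) from infrastructure errors (non-nil
// ExecuteCommand error), matching the os.Stat check in ProcessTask above.
func compileInContainer(srcPath, execPath, compileErrPath string) error {
	errFile, err := os.Create(compileErrPath)
	if err != nil {
		return err
	}
	defer errFile.Close()

	cmd := exec.Command("g++", "-O2", "-o", execPath, srcPath) // compiler choice is an assumption
	cmd.Stderr = errFile

	// A failed compile leaves its diagnostics in compileErrPath; the wrapper
	// still returns nil so the run is treated as a handled, reportable case.
	_ = cmd.Run()
	return nil
}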