Added executer in entirety
author    Brendan Hansen <brendan.f.hansen@gmail.com>
          Fri, 30 Aug 2019 03:33:02 +0000 (22:33 -0500)
committer Brendan Hansen <brendan.f.hansen@gmail.com>
          Fri, 30 Aug 2019 03:33:02 +0000 (22:33 -0500)
26 files changed:
codebox/config.moon
codebox/controllers/account/login.moon
codebox/controllers/executer/request.moon
codebox/controllers/executer/status_update.moon
codebox/controllers/index.moon
codebox/facades/executer.moon
codebox/migrations.moon
codebox/models/jobs.moon
codebox/models/problems.moon
codebox/models/test_cases.moon [new file with mode: 0644]
codebox/nginx.conf
executer/Tuprules.tup
executer/app/child_process.coffee [new file with mode: 0644]
executer/app/compilers/Tupfile [new file with mode: 0644]
executer/app/compilers/base_compiler.coffee [new file with mode: 0644]
executer/app/compilers/c_compiler.coffee [new file with mode: 0644]
executer/app/compilers/cpp_compiler.coffee [new file with mode: 0644]
executer/app/compilers/secure/seccomp.c [new file with mode: 0644]
executer/app/executer.coffee [new file with mode: 0644]
executer/app/executers/Tupfile [new file with mode: 0644]
executer/app/executers/base_executer.coffee [new file with mode: 0644]
executer/app/executers/c_executer.coffee [new file with mode: 0644]
executer/app/matchers.coffee [new file with mode: 0644]
executer/app/routes.coffee
executer/app/temp_file.coffee [new file with mode: 0644]
executer/package.json

index f6819e214a181b5d9c1bee5a828af96bc901888b..e622ccad78a8ab898b732fba05df9533826b3140 100644 (file)
@@ -6,6 +6,8 @@ config "development", ->
        secret (os.getenv 'APP_SECRET')
        req_secret (os.getenv 'REQ_SECRET')
 
+       executer_addr 'http://192.168.0.4:8080'
+
        postgres ->
                -- Have to use a fixed ip since the container name
                -- was not resolving correctly
index c6caa481128276096fc4b7eb18389f295d520216..99fdcaf1bf1a9718b8b9a1026440453a75e4608a 100644 (file)
@@ -22,7 +22,7 @@ make_controller
                        { "password", exists: true, min_length: 2 }
                }
 
-               users = Users\select "where username = ? limit 1", @params.username
+               users = Users\find_all { @params.username }, 'username'
                if #users > 0
                        if @crypto.verify @params.password, users[1].password_hash
                                @session.user_id = users[1].id
index aad1c7aa5631341ab9c888528f3f57cf75621086..ba86a2e92815d305855bc14cbf407d92a03ad24d 100644 (file)
@@ -1,6 +1,7 @@
 import make_controller from require "controllers.controller"
 import assert_valid from require "lapis.validate"
 import capture_errors from require 'lapis.application'
+import Problems from require 'models'
 
 make_controller
        inject:
@@ -15,7 +16,13 @@ make_controller
                        { 'problem_id', exists: true, is_integer: true }
                }
 
-               id = @executer\request @params.lang, @params.code
+               problem = Problems\find @params.problem_id
+               unless problem
+                       return json: { status: 'problem not found' }
+
+               test_cases = problem\get_test_cases!
+
+               id = @executer\request @params.lang, @params.code, @params.problem_id, test_cases, problem.time_limit
 
                json: id
        ), =>
index 3a934ff9e883a03aa8215bb5b3db38b53cec29d5..4565ff99e8ab1ccedb2fc717e16df0c3e497600b 100644 (file)
@@ -1,8 +1,30 @@
 import make_controller from require "controllers.controller"
+import Jobs from require 'models'
+import from_json, to_json from require 'lapis.util'
+import assert_valid from require 'lapis.validate'
+import capture_errors, yield_error from require 'lapis.application'
 
 make_controller
        middleware: { 'internal_request' }
 
-       post: =>
-               print 'Hit status'
+       post: capture_errors (=>
+               assert_valid @params, {
+                       { 'job_id', exists: true }
+                       { 'status', exists: true }
+               }
+
+               job = Jobs\find job_id: @params.job_id
+               unless job
+                       yield_error 'Job not found'
+                       return
+
+               status = from_json @params.status
+
+               job\update {
+                       status: status.status
+                       data: to_json status.data
+               }
+
                json: { status: 'success' }
+       ), =>
+               json: { status: 'error', errors: @errors }
index a788ddbfd6995a219bc0d79b693f42e2e386f40a..d73d7e25adaf4747387a7f56dbe4ec8698de7d32 100644 (file)
@@ -7,5 +7,5 @@ make_controller
 
        get: =>
                @users = Users\select!
-               @jobs = @user\get_c_jobs!
+               @jobs = @user\get_jobs!
                render: "index"
index f14ec602b9d8d43daee339173f65dd6a752f5350..6b7ad6d00666dcde0743a53c4b1cf1ad848c9d97 100644 (file)
@@ -1,13 +1,32 @@
+config = (require 'lapis.config').get!
 http = require 'lapis.nginx.http'
+
 import from_json, to_json from require 'lapis.util'
+import format_date from require 'lapis.db'
+import Jobs from require 'models'
 
 class ExecuterFacade
-       request: (lang, code) =>
-               body = http.simple 'http://192.168.0.4:8080/submit', {
+       request: (lang, code, problem_id, test_cases, time_limit) =>
+               body = http.simple "#{config.executer_addr}/request", {
+                       :lang
+                       :code
+                       :time_limit
+                       test_cases: to_json test_cases
+               }
+
+               -- Maybe add error checking?
+
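+               -- The executer service replies with { id: <uuid> }; it is stored on
+               -- the job so status updates can be matched back to this submission.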
+               job_id = from_json(body).id
+
+               job = Jobs\create {
+                       job_id: job_id
+                       user_id: 1
+                       problem_id: problem_id
+                       status: Jobs.statuses\for_db 'queued'
                        lang: lang
                        code: code
+                       time_initiated: format_date!
                }
 
-               from_json(body).id
-
+               job_id
 
index 74e2f8dd9c6917c5e97091a43efac6f15c357012..eb2917137314cd71e18f6d6c914ee175a2fa28f7 100644 (file)
@@ -1,4 +1,4 @@
-import create_table, types from require "lapis.db.schema"
+import create_table, add_column, types from require "lapis.db.schema"
 
 {
        [1]: =>
@@ -15,12 +15,14 @@ import create_table, types from require "lapis.db.schema"
        [2]: =>
                create_table "jobs", {
                        { "id", types.serial },
+                       { "job_id", types.varchar unique: true },
                        { "user_id", types.foreign_key },
                        { "problem_id", types.foreign_key },
-                       { "status", types.text null: true },
+                       { "status", types.enum },
                        { "lang", types.varchar },
                        { "code", types.text null: true },
                        { "time_initiated", types.time },
+                       { "data", types.text null: true },
 
                        "PRIMARY KEY (id)"
                }
@@ -33,6 +35,16 @@ import create_table, types from require "lapis.db.schema"
                        { "description", types.text null: true },
                        { "time_limit", types.integer },
 
+                       "PRIMARY KEY (id)"
+               }
+
+       [4]: =>
+               create_table "test_cases", {
+                       { "id", types.serial },
+                       { "problem_id", types.foreign_key },
+                       { "input", types.varchar },
+                       { "output", types.varchar },
+
                        "PRIMARY KEY (id)"
                }
 }
index 45be721a4120271f5c382d77c5973042ef460cc5..9b634372a2fec493fd9f41c7b9b7057a383ebba9 100644 (file)
@@ -1,6 +1,19 @@
-import Model from require "lapis.db.model"
+import Model, enum from require "lapis.db.model"
 
 class Jobs extends Model
+       @statuses: enum {
+               queued: 1
+               compiling: 2
+               running: 3
+               completed: 4
+               wrong_answer: 5
+               timed_out: 6
+               bad_language: 7
+               bad_problem: 8
+               compile_err: 9
+               error: 10
+       }
+
        @relations: {
                { 'user', belongs_to: 'Users' }
                { 'problem', belongs_to: 'Problems' }
index 577e4755379aab031a1fe15a8fe223dcfea9ff8a..fd7469ceef15dc0e4443d284f3e2cbbd33ff9bb5 100644 (file)
@@ -9,4 +9,5 @@ class Problems extends Model
 
        @relations: {
                { "jobs", has_many: 'Jobs' }
+               { "test_cases", has_many: 'TestCases' }
        }
diff --git a/codebox/models/test_cases.moon b/codebox/models/test_cases.moon
new file mode 100644 (file)
index 0000000..ea50c16
--- /dev/null
@@ -0,0 +1,6 @@
+import Model from require 'lapis.db.model'
+
+class TestCases extends Model
+       @relations: {
+               { "problem", belongs_to: 'Problems' }
+       }
index c90f83e869fdd9366404fba51d710b778bc7bd12..33c48d121cb221ff2d76912c8e380a47f861723a 100644 (file)
@@ -36,6 +36,7 @@ http {
     location /favicon.ico {
       alias static/favicon.ico;
     }
+
        location /proxy {
                internal;
                rewrite_by_lua "
@@ -54,7 +55,6 @@ http {
                  end
                ";
 
-               resolver 8.8.8.8;
                proxy_http_version 1.1;
                proxy_pass $_url;
        }
index 9591588d3ae9b9d5a3717fcaa33f975e251a13dc..d73cd59b6a7d05e0305ec49d63ae5cc9c34e7319 100644 (file)
@@ -1 +1 @@
-: *.coffee |> coffee -c -o %B.js %f |> %B.js
+: foreach *.coffee |> coffee -c -o %B.js %f |> %B.js
diff --git a/executer/app/child_process.coffee b/executer/app/child_process.coffee
new file mode 100644 (file)
index 0000000..68cd15f
--- /dev/null
@@ -0,0 +1,11 @@
+# on_child_exit resolves with the child's exit code once it exits, and
+# rejects when the process was terminated by a signal (exit code is null).
+module.exports = {
+       on_child_exit: (child) ->
+               new Promise (res, rej) ->
+                       child.on 'exit', (code) ->
+                               if code?
+                                       res code
+                               else
+                                       rej -1
+}
diff --git a/executer/app/compilers/Tupfile b/executer/app/compilers/Tupfile
new file mode 100644 (file)
index 0000000..f0fe651
--- /dev/null
@@ -0,0 +1 @@
+include_rules
diff --git a/executer/app/compilers/base_compiler.coffee b/executer/app/compilers/base_compiler.coffee
new file mode 100644 (file)
index 0000000..7676af5
--- /dev/null
@@ -0,0 +1,9 @@
+{ TempFile } = require '../temp_file'
+
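+# Default "compiler": just writes the submitted code to a temporary file and
+# resolves with it; language-specific compilers override compile.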
+class BaseCompiler
+       compile: (code) ->
+               Promise.resolve (new TempFile code)
+
+module.exports = {
+       BaseCompiler: BaseCompiler
+}
diff --git a/executer/app/compilers/c_compiler.coffee b/executer/app/compilers/c_compiler.coffee
new file mode 100644 (file)
index 0000000..64af19b
--- /dev/null
@@ -0,0 +1,37 @@
+{ BaseCompiler } = require './base_compiler'
+{ TempFile } = require '../temp_file'
+
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+class CCompiler extends BaseCompiler
+       compile: (code) ->
+               source_file = new TempFile code, 'c'
+               exec_file = new TempFile()
+
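+               # Compile the submission together with seccomp.c so the syscall
+               # filter is installed (via its constructor) before main() runs.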
+               compiler_process = spawn 'gcc', [
+                       '-Wall',
+                       '-O2',
+                       source_file.file_path,
+                       './app/compilers/secure/seccomp.c',
+                       '-lseccomp',
+                       '-std=c11',
+                       '-o',
+                       exec_file.file_path
+               ]
+
+               compiler_output = ""
+               compiler_process.stderr.on 'data', (data) -> compiler_output += data.toString()
+
+               result_code = await on_child_exit(compiler_process)
+
+               source_file.delete_file()
+               if result_code == 0
+                       return exec_file
+               else
+                       exec_file.delete_file()
+                       throw compiler_output
+
+module.exports = {
+       CCompiler: CCompiler
+}
diff --git a/executer/app/compilers/cpp_compiler.coffee b/executer/app/compilers/cpp_compiler.coffee
new file mode 100644 (file)
index 0000000..8539b80
--- /dev/null
@@ -0,0 +1,37 @@
+{ BaseCompiler } = require './base_compiler'
+{ TempFile } = require '../temp_file'
+
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+class CPPCompiler extends BaseCompiler
+       compile: (code) ->
+               source_file = new TempFile code, 'cpp'
+               exec_file = new TempFile()
+
+               compiler_process = spawn 'g++', [
+                       '-Wall',
+                       '-O2',
+                       source_file.file_path,
+                       './app/compilers/secure/seccomp.c',
+                       '-lseccomp',
+                       '-std=c++14',
+                       '-o',
+                       exec_file.file_path
+               ]
+
+               compiler_output = ""
+               compiler_process.stderr.on 'data', (data) -> compiler_output += data.toString()
+
+               result_code = await on_child_exit(compiler_process)
+
+               source_file.delete_file()
+               if result_code == 0
+                       return exec_file
+               else
+                       exec_file.delete_file()
+                       throw compiler_output
+
+module.exports = {
+       CPPCompiler: CPPCompiler
+}
diff --git a/executer/app/compilers/secure/seccomp.c b/executer/app/compilers/secure/seccomp.c
new file mode 100644 (file)
index 0000000..4e33023
--- /dev/null
@@ -0,0 +1,22 @@
+#include <seccomp.h>
+#include <linux/seccomp.h>
+
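+/*
+ * Linked into every compiled submission: the constructor runs before main()
+ * and installs a seccomp filter that kills the process if it attempts one of
+ * the blacklisted syscalls (file creation, fork/exec, sockets, ...).
+ */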
+void __attribute__((constructor(0))) init()
+{
+    scmp_filter_ctx ctx;
+
+    ctx = seccomp_init(SCMP_ACT_ALLOW);
+
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(open), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(openat), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(creat), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(mkdir), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(fork), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(clone), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(execve), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(unlink), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(kill), 0);
+    seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(socketcall), 0);
+
+    seccomp_load(ctx);
+}
diff --git a/executer/app/executer.coffee b/executer/app/executer.coffee
new file mode 100644 (file)
index 0000000..7c57cb7
--- /dev/null
@@ -0,0 +1,120 @@
+{ make_matcher } = require './matchers'
+{ TempFile } = require './temp_file'
+
+{ CCompiler } = require './compilers/c_compiler'
+{ CPPCompiler } = require './compilers/cpp_compiler'
+{ CExecuter } = require './executers/c_executer'
+
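+# Program output is compared line by line after trimming whitespace and
+# dropping blank lines.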
+clean_output = (otpt) ->
+       otpt.split '\n'
+               .map (s) -> s.trim()
+               .filter (s) -> s != ''
+
+create_matchers = (otpt) ->
+       otpt.map (s) -> make_matcher s
+
+class Executer
+       compilers: {
+               'C': new CCompiler(),
+               'CPP': new CPPCompiler()
+       }
+
+       executers: {
+               'C': new CExecuter(),
+               'CPP': new CExecuter()
+       }
+
+       process: (lang, code, test_cases, time_limit) ->
+               # 'yield' together with 'await return' makes this an async generator
+               if lang == "word"
+                       throw 'WORD PROBLEMS NOT SUPPORTED YET'
+               else
+                       yield from @process_code lang, code, test_cases, time_limit
+               await return
+
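+       # Async generator: yields status objects whose numeric 'status' field
+       # mirrors the Jobs.statuses enum in codebox/models/jobs.moon
+       # (2 compiling, 3 running, 4 completed, 5 wrong answer, 6 timed out,
+       #  8 bad problem, 9 compile error, 10 error).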
+       process_code: (lang, code, test_cases, time_limit) ->
+               compiler = @compilers[lang]
+               unless compiler?
+                       yield { status: 8 }
+                       return
+
+               yield { status: 2 }
+
+               exec_file = 0
+               try
+                       exec_file = await compiler.compile code
+               catch err
+                       yield { status: 9, data: err }
+                       return
+
+               executer = @executers[lang]
+               unless executer?
+                       exec_file.delete_file()
+
+                       yield { status: 8 }
+                       return
+
+               total_cases = test_cases.length
+               run_times = new Array total_cases
+               completed = 0
+
+               yield { status: 3, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+               for test_case in test_cases
+                       res = await executer.execute exec_file.file_path, test_case.input, time_limit
+
+                       switch (res.status)
+                               when 'SUCCESS'
+                                       output = clean_output res.output
+                                       expected = create_matchers (clean_output test_case.output)
+
+                                       worked = true
+                                       i = 0
+                                       for matcher in expected
+                                               unless matcher.test output[i]
+                                                       worked = false
+                                                       break
+
+                                               i++
+
+                                       unless i == output.length
+                                               worked = false
+
+                                       run_times[completed] = res.run_time
+                                       if worked
+                                               completed++
+
+                                               unless completed == total_cases
+                                                       # Running more cases
+                                                       yield { status: 3, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+                                               break
+                                       else
+                                               # Wrong answer
+                                               yield { status: 5, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+                                               exec_file.delete_file()
+                                               return
+
+                               when 'BAD_EXECUTION'
+                                       exec_file.delete_file()
+                                       # General error
+                                       yield { status: 10, data: { completed: completed, total: total_cases, run_times: run_times } }
+                                       return
+
+                               when 'TIME_LIMIT_EXCEEDED'
+                                       exec_file.delete_file()
+                                       # Time limit exceeded
+                                       yield { status: 6, data: { completed: completed, total: total_cases, run_times: run_times } }
+                                       return
+
+               exec_file.delete_file()
+               # Completed successfully
+               yield { status: 4, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+
+
+
+module.exports = {
+       Executer: Executer
+}
diff --git a/executer/app/executers/Tupfile b/executer/app/executers/Tupfile
new file mode 100644 (file)
index 0000000..f0fe651
--- /dev/null
@@ -0,0 +1 @@
+include_rules
diff --git a/executer/app/executers/base_executer.coffee b/executer/app/executers/base_executer.coffee
new file mode 100644 (file)
index 0000000..72fa679
--- /dev/null
@@ -0,0 +1,8 @@
+
+class BaseExecuter
+       execute: (path, input, time_limit) ->
+               { }
+
+module.exports = {
+       BaseExecuter: BaseExecuter
+}
diff --git a/executer/app/executers/c_executer.coffee b/executer/app/executers/c_executer.coffee
new file mode 100644 (file)
index 0000000..73b0636
--- /dev/null
@@ -0,0 +1,34 @@
+{ BaseExecuter } = require './base_executer'
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+class CExecuter extends BaseExecuter
+       execute: (path, input, time_limit) ->
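+               # Run the compiled binary through bash: the test-case input is piped
+               # in with `cat` and the time limit (given in ms) is enforced with
+               # `timeout -s SIGKILL`; exit codes 124/137 mean the timeout fired.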
+               bash_shell = spawn 'bash'
+
+               output = ""
+               bash_shell.stdout.on 'data', (data) => output += data.toString()
+
+               err_output = ""
+               bash_shell.stderr.on 'data', (data) => err_output += data.toString()
+
+               bash_shell.stdin.end "cat #{input} | timeout -s SIGKILL #{time_limit / 1000.0} #{path}"
+
+               start_time = process.hrtime()
+               res_code = await on_child_exit bash_shell
+               diff_time = process.hrtime start_time
+
+               run_time = diff_time[0] * 1000 + diff_time[1] / 1e6 # hrtime [seconds, nanoseconds] -> milliseconds
+
+               if res_code == 0
+                       return { status: 'SUCCESS', output: output, run_time: run_time }
+               else if res_code == 124 or res_code == 137
+                       bash_shell.kill()
+                       return { status: 'TIME_LIMIT_EXCEEDED' }
+               else
+                       bash_shell.kill()
+                       return { status: 'BAD_EXECUTION', err: err_output }
+
+module.exports = {
+       CExecuter: CExecuter
+}
diff --git a/executer/app/matchers.coffee b/executer/app/matchers.coffee
new file mode 100644 (file)
index 0000000..c143526
--- /dev/null
@@ -0,0 +1,23 @@
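+# Matchers for expected output: a line of the form __REGEXP(<pattern>) is
+# matched as a regular expression, any other line must match exactly.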
+class OutputMatcher
+       constructor: (@line) ->
+
+       test: (str) ->
+               str is @line
+
+class RegexOutputMatcher extends OutputMatcher
+       constructor: (line) ->
+               super ""
+               @regex = new RegExp line
+
+       test: (str) ->
+               @regex.test str
+
+module.exports = {
+       make_matcher: (line) ->
+               match = /__REGEXP\((.+)\)$/.exec line
+
+               if match?
+                       new RegexOutputMatcher match[1]
+               else
+                       new OutputMatcher line
+}
index 75ea4a58f91d5bbf2b639f7882c3de0eb25e11b7..2825dbfdd07451d7a0d7f4dc3a76b569f3e959b3 100644 (file)
@@ -1,4 +1,34 @@
 request = require 'request'
+uuid = require 'uuid/v4'
+
+executer = new (require './executer').Executer()
+
+# Apparently CoffeeScript can't handle a `for await`
+# loop, so this section is written in plain JavaScript.
+```
+async function handle_job(job_id, lang, code, cases, time_limit) {
+       let processor = executer.process(lang, code, cases, time_limit)
+
+       for await (let status of processor) {
+               request.post('http://192.168.0.3:8888/executer/status_update',
+                       { json: true,
+                         form: {
+                                 request_token: process.env.REQ_SECRET,
+                                 job_id: job_id,
+                                 status: JSON.stringify(status)
+                         }
+                       },
+                       (err, res, body) => {
+                               if (err) {
+                                       return console.log(err)
+                               }
+
+                               console.log("Updated job: ", job_id, status.status)
+                       }
+               )
+       }
+}
+```
 
 module.exports = (app) ->
        app.get '/', (req, res) ->
@@ -20,3 +50,13 @@ module.exports = (app) ->
                res.json {
                        id: 'test'
                }
+
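+       # Accept a submission, start judging it asynchronously, and respond right
+       # away with the generated job id; progress is reported back through
+       # /executer/status_update.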
+       app.post '/request', (req, res) ->
+               cases = JSON.parse req.body.test_cases
+               job_id = uuid()
+
+               handle_job job_id, req.body.lang, req.body.code, cases, req.body.time_limit
+
+               res.json {
+                       id: job_id
+               }
diff --git a/executer/app/temp_file.coffee b/executer/app/temp_file.coffee
new file mode 100644 (file)
index 0000000..dcd3f92
--- /dev/null
@@ -0,0 +1,25 @@
+fs = require 'fs'
+path = require 'path'
+genUUID = require 'uuid/v4'
+
+class TempFile
+       constructor: (contents, extension = "tmp") ->
+               @file_name = "temp-#{genUUID()}.#{extension}"
+               @file_path = path.join '/tmp', @file_name
+
+               if contents?
+                       @populate_file contents
+
+       populate_file: (contents) ->
+               fs.writeFileSync @file_path, contents
+
+       delete_file: ->
+               try
+                       fs.unlinkSync @file_path
+               catch e
+                       0
+
+
+module.exports = {
+       TempFile: TempFile
+}
index a7fa9208b1b3061233c845e3f617d8d3fa880a0f..9088cca4db2c24c28b13f9b4e0f0bb10ff173e9a 100644 (file)
@@ -9,6 +9,7 @@
   "dependencies": {
     "express": "^4.17.1",
     "morgan": "^1.9.1",
-    "request": "^2.88.0"
+    "request": "^2.88.0",
+    "uuid": "^3.3.3"
   }
 }