secret (os.getenv 'APP_SECRET')
req_secret (os.getenv 'REQ_SECRET')
+ executer_addr 'http://192.168.0.4:8080'
+
postgres ->
-- Have to use a fixed ip since the container name
-- was not resolving correctly
{ "password", exists: true, min_length: 2 }
}
- users = Users\select "where username = ? limit 1", @params.username
+ users = Users\find username: @params.username
if #users > 0
if @crypto.verify @params.password, users[1].password_hash
@session.user_id = users[1].id
import make_controller from require "controllers.controller"
import assert_valid from require "lapis.validate"
import capture_errors from require 'lapis.application'
+import Problems from require 'models'
make_controller
inject:
{ 'problem_id', exists: true, is_integer: true }
}
-    id = @executer\request @params.lang, @params.code
+    -- Look up the problem first so its test cases and time limit can
+    -- be shipped to the executer along with the submission.
+    problem = Problems\find @params.problem_id
+    unless problem
+      return json: { status: 'problem not found' }
+
+    test_cases = problem\get_test_cases!
+
+    id = @executer\request @params.lang, @params.code, @params.problem_id, test_cases, problem.time_limit
json: id
), =>
import make_controller from require "controllers.controller"
+import Jobs from require 'models'
+import from_json, to_json from require 'lapis.util'
+import assert_valid from require 'lapis.validate'
+import capture_errors, yield_error from require 'lapis.application'
make_controller
  middleware: { 'internal_request' }
-  post: =>
-    print 'Hit status'
+  -- Callback endpoint: the executer reports job progress here.
+  -- Validates the payload, finds the job, and persists the new
+  -- status plus its JSON data blob.
+  post: capture_errors (=>
+    assert_valid @params, {
+      { 'job_id', exists: true }
+      { 'status', exists: true }
+    }
+
+    job = Jobs\find job_id: @params.job_id
+    unless job
+      -- yield_error aborts the handler; the return is defensive.
+      yield_error 'Job not found'
+      return
+
+    -- @params.status arrives JSON-encoded as { status: <int>, data: ... }
+    -- (see the executer's status updates).
+    status = from_json @params.status
+
+    job\update {
+      status: status.status
+      data: to_json status.data
+    }
+
    json: { status: 'success' }
+  ), =>
+    json: { status: 'error', errors: @errors }
  get: =>
    @users = Users\select!
-    @jobs = @user\get_c_jobs!
+    -- `get_jobs` is generated by the Users `has_many: 'Jobs'`
+    -- relation; `get_c_jobs` was a stale name.
+    @jobs = @user\get_jobs!
    render: "index"
+config = (require 'lapis.config').get!
http = require 'lapis.nginx.http'
+
import from_json, to_json from require 'lapis.util'
+import format_date from require 'lapis.db'
+import Jobs from require 'models'
class ExecuterFacade
-  request: (lang, code) =>
-    body = http.simple 'http://192.168.0.4:8080/submit', {
+  -- Submit a judging job to the executer service and record it in the
+  -- Jobs table. Returns the executer-assigned job id (a string).
+  request: (lang, code, problem_id, test_cases, time_limit) =>
+    body = http.simple "#{config.executer_addr}/request", {
+      :lang
+      :code
+      :time_limit
+      test_cases: to_json test_cases
+    }
+
+    -- TODO: add error checking -- a failed HTTP call or a malformed
+    -- body will blow up in from_json below.
+
+    job_id = from_json(body).id
+
+    job = Jobs\create {
+      job_id: job_id
+      -- NOTE(review): user id is hard-coded; wire in the session user.
+      user_id: 1
+      problem_id: problem_id
+      status: Jobs.statuses\for_db 'queued'
      lang: lang
      code: code
+      time_initiated: format_date!
    }
-    from_json(body).id
-
+    job_id
-import create_table, types from require "lapis.db.schema"
+import create_table, add_column, types from require "lapis.db.schema"
{
[1]: =>
[2]: =>
    create_table "jobs", {
      { "id", types.serial },
+      -- Executer-assigned UUID used to correlate status callbacks.
+      { "job_id", types.varchar unique: true },
      { "user_id", types.foreign_key },
      { "problem_id", types.foreign_key },
-      { "status", types.text null: true },
+      -- Int-backed enum; values defined in the Jobs model's @statuses.
+      { "status", types.enum },
      { "lang", types.varchar },
      { "code", types.text null: true },
      { "time_initiated", types.time },
+      -- JSON blob of progress details (completed, total, run_times).
+      { "data", types.text null: true },
      "PRIMARY KEY (id)"
    }
{ "description", types.text null: true },
{ "time_limit", types.integer },
+ "PRIMARY KEY (id)"
+ }
+
+ [4]: =>
+ create_table "test_cases", {
+ { "id", types.serial },
+ { "problem_id", types.foreign_key },
+ { "input", types.varchar },
+ { "output", types.varchar },
+
"PRIMARY KEY (id)"
}
}
-import Model from require "lapis.db.model"
+import Model, enum from require "lapis.db.model"
class Jobs extends Model
+  -- Job lifecycle / result states, stored as ints in the jobs table's
+  -- `status` enum column. The executer reports these numeric values
+  -- directly in its status-update payloads.
+  @statuses: enum {
+    queued: 1
+    compiling: 2
+    running: 3
+    completed: 4
+    wrong_answer: 5
+    timed_out: 6
+    bad_language: 7
+    bad_problem: 8
+    compile_err: 9
+    error: 10
+  }
+
@relations: {
{ 'user', belongs_to: 'Users' }
{ 'problem', belongs_to: 'Problems' }
@relations: {
{ "jobs", has_many: 'Jobs' }
+ { "test_cases", has_many: 'TestCases' }
}
--- /dev/null
+import Model from require 'lapis.db.model'
+
+-- A single input/expected-output pair belonging to a problem.
+class TestCases extends Model
+  @relations: {
+    { "problem", belongs_to: 'Problems' }
+  }
location /favicon.ico {
alias static/favicon.ico;
}
+
location /proxy {
internal;
rewrite_by_lua "
end
";
- resolver 8.8.8.8;
proxy_http_version 1.1;
proxy_pass $_url;
}
-: *.coffee |> coffee -c -o %B.js %f |> %B.js
+: foreach *.coffee |> coffee -c -o %B.js %f |> %B.js
--- /dev/null
+cp = require 'child_process'
+
+module.exports = {
+  # Resolve with the child's exit code once it exits; reject with -1
+  # when the process terminated without one (exit code is null when
+  # the child was killed by a signal).
+  # (Renamed the parameter: `cp` shadowed the module-level require.)
+  on_child_exit: (child) ->
+    new Promise (res, rej) ->
+      child.on 'exit', (code) ->
+        if code?
+          res code
+        else
+          rej -1
+}
--- /dev/null
+include_rules
--- /dev/null
+{ TempFile } = require '../temp_file'
+
+# Fallback "compiler": materialises the source code into a temp file
+# without invoking any toolchain.
+class BaseCompiler
+  # Returns a Promise resolving to a TempFile containing `code`.
+  compile: (code) ->
+    Promise.resolve (new TempFile code)
+
+module.exports = {
+  BaseCompiler: BaseCompiler
+}
--- /dev/null
+{ BaseCompiler } = require './base_compiler'
+{ TempFile } = require '../temp_file'
+
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+# Compiles submitted C code with gcc. The seccomp shim is linked in so
+# the produced binary installs a syscall filter before main() runs.
+class CCompiler extends BaseCompiler
+  # Resolves to a TempFile holding the executable. On failure the
+  # promise rejects with gcc's stderr output (a plain string).
+  compile: (code) ->
+    source_file = new TempFile code, 'c'
+    exec_file = new TempFile()
+
+    compiler_process = spawn 'gcc', [
+      '-Wall',
+      '-O2',
+      source_file.file_path,
+      './app/compilers/secure/seccomp.c',
+      '-lseccomp',
+      '-std=c11',
+      '-o',
+      exec_file.file_path
+    ]
+
+    compiler_output = ""
+    compiler_process.stderr.on 'data', (data) -> compiler_output += data.toString()
+
+    result_code = await on_child_exit(compiler_process)
+
+    # Source file is no longer needed regardless of outcome.
+    source_file.delete_file()
+    if result_code == 0
+      return exec_file
+    else
+      # Remove the (empty) output file before reporting failure.
+      exec_file.delete_file()
+      throw compiler_output
+
+module.exports = {
+  CCompiler: CCompiler
+}
--- /dev/null
+{ BaseCompiler } = require './base_compiler'
+{ TempFile } = require '../temp_file'
+
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+# Compiles submitted C++ code with g++, linking in the seccomp shim so
+# the produced binary installs a syscall filter before main() runs.
+class CPPCompiler extends BaseCompiler
+  # Resolves to a TempFile holding the executable. On failure the
+  # promise rejects with g++'s stderr output (a plain string).
+  # NOTE(review): the source is written with a '.c' extension; g++
+  # still compiles it as C++, but 'cpp' would be clearer -- confirm
+  # before changing.
+  compile: (code) ->
+    source_file = new TempFile code, 'c'
+    exec_file = new TempFile()
+
+    compiler_process = spawn 'g++', [
+      '-Wall',
+      '-O2',
+      source_file.file_path,
+      './app/compilers/secure/seccomp.c',
+      '-lseccomp',
+      '-std=c++14',
+      '-o',
+      exec_file.file_path
+    ]
+
+    compiler_output = ""
+    compiler_process.stderr.on 'data', (data) -> compiler_output += data.toString()
+
+    result_code = await on_child_exit(compiler_process)
+
+    # Source file is no longer needed regardless of outcome.
+    source_file.delete_file()
+    if result_code == 0
+      return exec_file
+    else
+      # Remove the (empty) output file before reporting failure.
+      exec_file.delete_file()
+      throw compiler_output
+
+module.exports = {
+  CPPCompiler: CPPCompiler
+}
--- /dev/null
+#include <seccomp.h>
+#include <linux/seccomp.h>
+
+/*
+ * Constructor that runs before main() in sandboxed submissions:
+ * installs a seccomp filter that kills the process on dangerous
+ * syscalls (file creation, process spawning, exec, sockets).
+ * Default action is ALLOW; the listed syscalls are blacklisted.
+ */
+void __attribute__((constructor(0))) init()
+{
+  scmp_filter_ctx ctx;
+
+  ctx = seccomp_init(SCMP_ACT_ALLOW);
+  if (ctx == NULL)
+    return; /* cannot build a filter; fail open as before */
+
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(open), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(openat), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(creat), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(mkdir), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(fork), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(clone), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(execve), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(unlink), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(kill), 0);
+  seccomp_rule_add(ctx, SCMP_ACT_KILL, SCMP_SYS(socketcall), 0);
+
+  seccomp_load(ctx);
+  /* the kernel keeps its own copy; release the userspace context
+     to avoid leaking it (fix: was never released) */
+  seccomp_release(ctx);
+}
--- /dev/null
+{ make_matcher } = require './matchers'
+{ TempFile } = require './temp_file'
+
+{ CCompiler } = require './compilers/c_compiler'
+{ CPPCompiler } = require './compilers/cpp_compiler'
+{ CExecuter } = require './executers/c_executer'
+
+# Split program output into trimmed, non-empty lines.
+clean_output = (otpt) ->
+  otpt.split '\n'
+  .map (s) -> s.trim()
+  .filter (s) -> s != ''
+
+# Wrap each expected-output line in a matcher (exact or __REGEXP).
+create_matchers = (otpt) ->
+  otpt.map (s) -> make_matcher s
+
+# Drives one submission through compile + run. `process` is an async
+# generator; each yielded object is a status update whose numeric
+# `status` codes mirror the web app's Jobs.statuses enum
+# (2 compiling, 3 running, 4 completed, 5 wrong_answer, 6 timed_out,
+#  7 bad_language, 9 compile_err, 10 error).
+class Executer
+  compilers: {
+    'C': new CCompiler(),
+    'CPP': new CPPCompiler()
+  }
+
+  executers: {
+    'C': new CExecuter(),
+    'CPP': new CExecuter()
+  }
+
+  process: (lang, code, test_cases, time_limit) ->
+    # Makes function async
+    if lang == "word"
+      throw 'WORD PROBLEMS NOT SUPPORTED YET'
+    else
+      yield from @process_code lang, code, test_cases, time_limit
+    await return
+
+  process_code: (lang, code, test_cases, time_limit) ->
+    compiler = @compilers[lang]
+    unless compiler?
+      # 7 = bad_language (fix: was 8/bad_problem, which misreported an
+      # unsupported language as a broken problem)
+      yield { status: 7 }
+      return
+
+    # 2 = compiling
+    yield { status: 2 }
+
+    exec_file = 0
+    try
+      exec_file = await compiler.compile code
+    catch err
+      # 9 = compile_err; err is the compiler's stderr output
+      yield { status: 9, data: err }
+      return
+
+    executer = @executers[lang]
+    unless executer?
+      exec_file.delete_file()
+
+      # 7 = bad_language (fix: was 8/bad_problem) -- no executer
+      # registered for this language
+      yield { status: 7 }
+      return
+
+    total_cases = test_cases.length
+    run_times = new Array total_cases
+    completed = 0
+
+    # 3 = running (initial progress report)
+    yield { status: 3, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+    for test_case in test_cases
+      res = await executer.execute exec_file.file_path, test_case.input, time_limit
+
+      switch (res.status)
+        when 'SUCCESS'
+          output = clean_output res.output
+          expected = create_matchers (clean_output test_case.output)
+
+          # Every expected line must match, and the program must not
+          # print extra non-blank lines.
+          worked = true
+          i = 0
+          for matcher in expected
+            unless matcher.test output[i]
+              worked = false
+              break
+
+            i++
+
+          unless i == output.length
+            worked = false
+
+          run_times[completed] = res.run_time
+          if worked
+            completed++
+
+            unless completed == total_cases
+              # Running more cases
+              yield { status: 3, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+            # breaks out of the switch only; the for loop continues
+            break
+          else
+            # Wrong answer
+            yield { status: 5, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+            exec_file.delete_file()
+            return
+
+        when 'BAD_EXECUTION'
+          exec_file.delete_file()
+          # General error
+          yield { status: 10, data: { completed: completed, total: total_cases, run_times: run_times } }
+          return
+
+        when 'TIME_LIMIT_EXCEEDED'
+          exec_file.delete_file()
+          # Time limit exceeded
+          yield { status: 6, data: { completed: completed, total: total_cases, run_times: run_times } }
+          return
+
+    exec_file.delete_file()
+    # Completed successfully
+    yield { status: 4, data: { completed: completed, total: total_cases, run_times: run_times } }
+
+
+
+
+module.exports = {
+  Executer: Executer
+}
--- /dev/null
+include_rules
--- /dev/null
+
+# No-op executer base; concrete executers override execute.
+class BaseExecuter
+  # Returns a result object; subclasses resolve to
+  # { status, output?, run_time?, err? }.
+  execute: (path, input, time_limit) ->
+    { }
+
+module.exports = {
+  BaseExecuter: BaseExecuter
+}
--- /dev/null
+{ BaseExecuter } = require './base_executer'
+{ spawn } = require 'child_process'
+{ on_child_exit } = require '../child_process'
+
+# Runs a compiled submission under bash with a hard timeout.
+class CExecuter extends BaseExecuter
+  # path: executable's path; input: path of the input file;
+  # time_limit: milliseconds. Resolves to { status, ... }.
+  # NOTE(review): `path`/`input` are interpolated into a shell
+  # command; both come from internally generated temp files, so this
+  # is safe only while callers never pass user-controlled paths.
+  execute: (path, input, time_limit) ->
+    bash_shell = spawn 'bash'
+
+    output = ""
+    bash_shell.stdout.on 'data', (data) => output += data.toString()
+
+    err_output = ""
+    bash_shell.stderr.on 'data', (data) => err_output += data.toString()
+
+    # timeout(1) takes seconds; time_limit arrives in ms.
+    bash_shell.stdin.end "cat #{input} | timeout -s SIGKILL #{time_limit / 1000.0} #{path}"
+
+    start_time = process.hrtime()
+    res_code = await on_child_exit bash_shell
+    diff_time = process.hrtime start_time
+
+    # NOTE(review): this mixes units (seconds * 1e6 plus fractional
+    # microseconds); confirm the intended unit before relying on it.
+    run_time = diff_time[0] * 1000000 + Math.floor(diff_time[1] / 1000) / 1000000
+
+    if res_code == 0
+      return { status: 'SUCCESS', output: output, run_time: run_time }
+    else if res_code == 124 or res_code == 137
+      bash_shell.kill()
+      # Fix: was misspelled 'TIME_LIMIT_EXECEED', which never matched
+      # the 'TIME_LIMIT_EXCEEDED' case the Executer switches on.
+      return { status: 'TIME_LIMIT_EXCEEDED' }
+    else
+      bash_shell.kill()
+      return { status: 'BAD_EXECUTION', err: err_output }
+
+module.exports = {
+  CExecuter: CExecuter
+}
--- /dev/null
+# Matches one line of expected output verbatim.
+class OutputMatcher
+  constructor: (@line) ->
+
+  test: (str) ->
+    str is @line
+
+# Matches a line against a regular expression taken from a
+# __REGEXP(...) directive in the expected output.
+# NOTE(review): the regex comes from problem data; a pathological
+# pattern could be slow (ReDoS) -- confirm problem authors are trusted.
+class RegexOutputMatcher extends OutputMatcher
+  constructor: (line) ->
+    super ""
+    @regex = new RegExp line
+
+  test: (str) ->
+    @regex.test str
+
+module.exports = {
+  # Build the right matcher for one expected-output line.
+  make_matcher: (line) ->
+    match = /__REGEXP\((.+)\)$/.exec line
+
+    if match?
+      new RegexOutputMatcher match[1]
+    else
+      new OutputMatcher line
+}
request = require 'request'
+uuid = require 'uuid/v4'
+
+executer = new (require './executer').Executer()
+
+# Apparently CoffeeScript can't handle a `for await`
+# loop, so this is written in plain JavaScript.
+```
+async function handle_job(job_id, lang, code, cases, time_limit) {
+  let processor = executer.process(lang, code, cases, time_limit)
+
+  // Forward every status the executer yields back to the web app.
+  // NOTE(review): callback address is hard-coded (like the fixed
+  // executer IP elsewhere in this change) -- consider an env var.
+  for await (let status of processor) {
+    request.post('http://192.168.0.3:8888/executer/status_update',
+      { json: true,
+        form: {
+          request_token: process.env.REQ_SECRET,
+          job_id: job_id,
+          status: JSON.stringify(status)
+        }
+      },
+      (err, res, body) => {
+        if (err) {
+          return console.log(err)
+        }
+
+        console.log("Updated job: ", job_id, status.status)
+      }
+    )
+  }
+}
+```
module.exports = (app) ->
  app.get '/', (req, res) ->
    res.json {
      id: 'test'
    }
+
+  # Accept a judging request, kick off the job in the background, and
+  # immediately return the generated job id.
+  app.post '/request', (req, res) ->
+    cases = JSON.parse req.body.test_cases
+    job_id = uuid()
+
+    # Fire-and-forget, but log failures so a rejected job (fix) does
+    # not become an unhandled promise rejection.
+    handle_job(job_id, req.body.lang, req.body.code, cases, req.body.time_limit).catch (err) -> console.log err
+
+    res.json {
+      id: job_id
+    }
--- /dev/null
+fs = require 'fs'
+path = require 'path'
+genUUID = require 'uuid/v4'
+
+# A uniquely named file under /tmp, optionally populated on creation.
+class TempFile
+  # contents: optional initial file contents;
+  # extension: filename suffix (defaults to "tmp").
+  constructor: (contents, extension = "tmp") ->
+    @file_name = "temp-#{genUUID()}.#{extension}"
+    @file_path = path.join '/tmp', @file_name
+
+    if contents?
+      @populate_file contents
+
+  populate_file: (contents) ->
+    fs.writeFileSync @file_path, contents
+
+  # Best-effort delete: errors (e.g. file already gone) are
+  # deliberately swallowed.
+  delete_file: ->
+    try
+      fs.unlinkSync @file_path
+    catch e
+      0
+
+
+module.exports = {
+  TempFile: TempFile
+}
+}
"dependencies": {
"express": "^4.17.1",
"morgan": "^1.9.1",
- "request": "^2.88.0"
+ "request": "^2.88.0",
+ "uuid": "^3.3.3"
}
}