From 5a25ea091ec547a9ed278c6278a5cdd143c0a8c7 Mon Sep 17 00:00:00 2001
From: tobiichi3227 <86729076+tobiichi3227@users.noreply.github.com>
Date: Wed, 9 Oct 2024 21:50:42 +0800
Subject: [PATCH] Misc: E2E test, migration, upgrade script, testcase setup UI, custom problem-class (#94)

* fix: missing third-party js script
* chore: add redis-db and service port install option
* refactor: we don't need multi-threading to do cleanup work
* refactor: there should be no distinction between kernels and users, following the TOJ spec
* feat(UI): add a password visibility eye toggle, copied from TNFSH-Scoreboard
* feat: add db and redis migration
* feat: add simple upgrade script
* feat: add allow_submit option to problems
* feat: add a custom user motto
* perf: incremental refresh of challenge_state

  The challenge_state table caches all challenge results. Previously, refreshing challenge_state made the database recalculate data for every challenge, even though most of them had not changed. The incremental refresh now updates only the challenges that actually changed.

* refactor: remove unused problem expire field
* refactor: move get problem state from `list_pro()` to `map_acct_rate()`
* feat: redirect to sign-in page when user is a guest
* test: add e2e test
* feat: prevent unauthorized users from updating passwords
* test: add user motto test
* test: add allow_submit option test
* fix: change progress bar text
* fix: PDF file is displayed as garbled text
* feat: add a hash to check file integrity during upload (a sketch of the idea appears below)
* fix: escape characters when content contains a code block
* fix: do not push empty URL to history
* feat: add testcase setup UI

  In this PR, we add a full file management UI. Test case files, attachments, and checkers can now be managed from the web UI, so we no longer need to upload a whole problem package just to overwrite files. We also introduce per-language limit settings, so limits can be set for specific languages; this matters for Python 3 or Java, which are usually slower than compiled languages like C or C++. We also provide batch test case file operations, which reduce repetitive single-file operations for users.

* feat: add a simple log viewer for log params
* feat: new problem class system

  In this PR, we added user-customizable proclasses, which can be made public for others to use. We improved the original proclass selection menu and added a new interface for it. We also introduced a proclass collection feature, allowing users to bookmark their favorite proclasses.
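As an illustration of the upload integrity check mentioned above, here is a minimal sketch of the idea only; the helper name and the 64 KiB chunk size are hypothetical and not taken from this patch. The server recomputes the hash of the received file and compares it with the digest sent by the client:

```python
import hashlib


def verify_upload(path: str, expected_sha256: str) -> bool:
    """Recompute the SHA-256 of an uploaded file chunk by chunk and
    compare it with the digest supplied by the client.
    Hypothetical helper for illustration; not part of this patch."""
    digest = hashlib.sha256()
    with open(path, 'rb') as f:
        while chunk := f.read(65536):  # hash in 64 KiB chunks to bound memory use
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256
```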
* fix: follow the target behavior when a link has a target attribute * refactor: move `self.acct` to the default namespace to reduce argument passing * perf: use batch inserts to reduce SQL execution * fix: corrected wrong acct_id in a multi-threading environment * perf: use SQL to calculate user rank instead of Python In TOJ, response time decreased from 2000ms to about 300ms * fix: only update the code hash when the code is actually submitted * ci: bypass installation restrictions of the package manager * ci: remove unnecessary zip compression, as actions/upload-artifact will handle this * fix: module not found error * refactor: make ruff and pyright happy * Refactor: Improve migration logic and error handling in main function * fix: correct wrong log message grammar * chore: remove `f.close()`, as the with statement handles this --------- Co-authored-by: lifeadventurer <108756201+LifeAdventurer@users.noreply.github.com> --- .github/workflows/tests.yml | 69 ++ ...20182633_add_challenge_contest_id_index.py | 2 + ...20240925015623_add_problem_allow_submit.py | 2 + migration/20240925163422_user_add_motto.py | 14 + ...5173122_add_incremental_challenge_state.py | 108 +++ ...40926201600_remove_problem_expire_field.py | 19 + migration/20240930220712_add_testcase.py | 12 + ...41001153200_move_test_fields_to_problem.py | 49 ++ ...1005212800_add_trigger_for_test_deleted.py | 19 + ...0241006130410_change_log_param_to_jsonb.py | 7 + migration/20241006151800_problem_class.py | 22 + ...1007235900_add_user_proclass_collection.py | 9 + migration/migration.py | 72 ++ scripts/.env.example | 2 + scripts/install.sh | 17 + scripts/ntoj.conf | 6 +- src/handlers/acct.py | 146 +++- src/handlers/base.py | 32 +- src/handlers/bulletin.py | 2 - src/handlers/chal.py | 1 - src/handlers/contests/contests.py | 2 +- src/handlers/contests/manage/acct.py | 5 +- src/handlers/contests/manage/pro.py | 3 +- src/handlers/contests/proset.py | 32 +- src/handlers/contests/scoreboard.py | 16 +- src/handlers/index.py | 19 +- src/handlers/log.py | 52 +- src/handlers/manage/acct.py | 2 +- src/handlers/manage/board.py | 26 +- src/handlers/manage/bulletin.py | 14 +- src/handlers/manage/group.py | 2 +- src/handlers/manage/judge.py | 4 +- src/handlers/manage/pro.py | 812 +++++++++++++++--- src/handlers/manage/proclass.py | 79 +- src/handlers/manage/question.py | 16 +- src/handlers/pack.py | 13 +- src/handlers/pro.py | 156 ++-- src/handlers/ques.py | 2 +- src/handlers/rank.py | 153 ++-- src/handlers/report.py | 2 +- src/handlers/submit.py | 36 +- src/runtests.py | 193 +++++ src/runtests.sh | 66 ++ src/server.py | 16 +- src/services/board.py | 5 - src/services/chal.py | 68 +- src/services/code.py | 2 +- src/services/contests.py | 4 +- src/services/log.py | 30 +- src/services/pack.py | 4 + src/services/pro.py | 381 ++++---- src/services/rate.py | 64 +- src/services/user.py | 32 +- src/static/index.js | 8 +- src/static/pack.js | 17 +- src/static/templ/{ => acct}/acct-config.html | 18 +- src/static/templ/acct/proclass-add.html | 101 +++ src/static/templ/acct/proclass-list.html | 33 + src/static/templ/acct/proclass-update.html | 119 +++ .../templ/{acct.html => acct/profile.html} | 11 +- src/static/templ/board.html | 8 +- src/static/templ/bulletin.html | 4 +- src/static/templ/chal.html | 4 +- src/static/templ/challist.html | 4 +- src/static/templ/contests/contests-list.html | 2 +- src/static/templ/contests/info.html | 10 +- src/static/templ/contests/reg.html | 18 +- src/static/templ/index.html | 27 +- src/static/templ/log.html | 12 + 
src/static/templ/loglist.html | 2 +- src/static/templ/manage/bulletin/update.html | 2 +- src/static/templ/manage/pro/add.html | 100 ++- src/static/templ/manage/pro/filemanager.html | 340 ++++++++ src/static/templ/manage/pro/reinit.html | 64 -- src/static/templ/manage/pro/update.html | 257 ++++-- src/static/templ/manage/pro/updatetests.html | 501 ++++++++--- src/static/templ/manage/proclass/add.html | 53 +- .../templ/manage/proclass/proclass-list.html | 16 +- src/static/templ/manage/proclass/update.html | 66 +- src/static/templ/pro.html | 28 +- src/static/templ/proset.html | 261 ++++-- src/static/templ/question.html | 6 +- src/static/templ/report-problem.html | 2 +- src/static/templ/sign.html | 48 +- src/static/templ/submit.html | 2 +- src/static/templ/user-rank.html | 2 + src/tests/e2e/__init__.py | 0 src/tests/e2e/acct.py | 126 +++ src/tests/e2e/board.py | 86 ++ src/tests/e2e/bulletin.py | 73 ++ src/tests/e2e/chal.py | 184 ++++ src/tests/e2e/contest.py | 476 ++++++++++ src/tests/e2e/main.py | 253 ++++++ src/tests/e2e/manage/__init__.py | 0 src/tests/e2e/manage/acct.py | 21 + src/tests/e2e/manage/group.py | 6 + src/tests/e2e/manage/pack.py | 147 ++++ src/tests/e2e/manage/pro/__init__.py | 0 src/tests/e2e/manage/pro/filemanager.py | 300 +++++++ src/tests/e2e/manage/pro/update.py | 147 ++++ src/tests/e2e/manage/pro/updatetests.py | 313 +++++++ src/tests/e2e/pro.py | 45 + src/tests/e2e/proclass.py | 289 +++++++ src/tests/e2e/proset.py | 59 ++ src/tests/e2e/ques.py | 65 ++ src/tests/e2e/rank.py | 22 + src/tests/e2e/submit.py | 51 ++ src/tests/e2e/util.py | 188 ++++ src/tests/main.py | 10 + src/tests/oj.sql | 609 +++++++++++++ src/tests/reinit.sh | 29 + src/tests/static_file/code/ce.cpp | 1 + .../static_file/code/float_checker_ac.cpp | 8 + .../static_file/code/float_checker_wa.cpp | 10 + src/tests/static_file/code/large.cpp | 64 ++ src/tests/static_file/code/mle.py | 1 + src/tests/static_file/code/re.cpp | 3 + src/tests/static_file/code/resig.cpp | 3 + src/tests/static_file/code/tle.cpp | 6 + src/tests/static_file/code/tle.py | 3 + src/tests/static_file/code/toj3.ac.py | 1 + src/tests/static_file/code/toj3.wa.py | 2 + src/tests/static_file/code/toj659.ac.cpp | 20 + src/tests/static_file/code/toj674.ac.cpp | 23 + src/tests/static_file/float_checker.tar.xz | Bin 0 -> 1092 bytes src/tests/static_file/float_checker/conf.json | 33 + .../static_file/float_checker/http/cont.html | 4 + .../float_checker/pass_all_checker.cpp | 12 + .../static_file/float_checker/res/check/build | 3 + .../float_checker/res/check/check.cpp | 19 + .../float_checker/res/testdata/1.in | 1 + .../float_checker/res/testdata/1.out | 1 + .../float_checker/res/testdata/2.in | 1 + .../float_checker/res/testdata/2.out | 1 + .../float_checker/res/testdata/3.in | 1 + .../float_checker/res/testdata/3.out | 1 + .../float_checker/res/testdata/4.in | 1 + .../float_checker/res/testdata/4.out | 1 + .../float_checker/res/testdata/5.in | 1 + .../float_checker/res/testdata/5.out | 1 + src/tests/static_file/toj3.tar.xz | Bin 0 -> 11332 bytes src/tests/static_file/toj3/3.in | 2 + src/tests/static_file/toj3/3.out | 1 + src/tests/static_file/toj3/3.out.incorrect | 1 + src/tests/static_file/toj3/conf.json | 23 + src/tests/static_file/toj3/http/cont.html | 29 + src/tests/static_file/toj3/res/testdata/1.in | 11 + src/tests/static_file/toj3/res/testdata/1.out | 10 + src/tests/static_file/toj659.tar.xz | Bin 0 -> 156228 bytes src/tests/static_file/toj659/conf.json | 29 + src/tests/static_file/toj659/http/cont.pdf | Bin 0 -> 127339 bytes 
.../static_file/toj659/http/implement.cpp | 6 + src/tests/static_file/toj659/http/stub.cpp | 28 + .../static_file/toj659/res/make/Makefile | 2 + .../static_file/toj659/res/make/stub.cpp | 58 ++ src/tests/static_file/toj674.tar.xz | Bin 0 -> 707936 bytes src/upgrade.py | 76 ++ src/url.py | 4 +- 158 files changed, 7924 insertions(+), 1148 deletions(-) create mode 100644 .github/workflows/tests.yml create mode 100644 migration/20240920182633_add_challenge_contest_id_index.py create mode 100644 migration/20240925015623_add_problem_allow_submit.py create mode 100644 migration/20240925163422_user_add_motto.py create mode 100644 migration/20240925173122_add_incremental_challenge_state.py create mode 100644 migration/20240926201600_remove_problem_expire_field.py create mode 100644 migration/20240930220712_add_testcase.py create mode 100644 migration/20241001153200_move_test_fields_to_problem.py create mode 100644 migration/20241005212800_add_trigger_for_test_deleted.py create mode 100644 migration/20241006130410_change_log_param_to_jsonb.py create mode 100644 migration/20241006151800_problem_class.py create mode 100644 migration/20241007235900_add_user_proclass_collection.py create mode 100644 migration/migration.py create mode 100644 src/runtests.py create mode 100755 src/runtests.sh rename src/static/templ/{ => acct}/acct-config.html (88%) create mode 100644 src/static/templ/acct/proclass-add.html create mode 100644 src/static/templ/acct/proclass-list.html create mode 100644 src/static/templ/acct/proclass-update.html rename src/static/templ/{acct.html => acct/profile.html} (96%) create mode 100644 src/static/templ/log.html create mode 100644 src/static/templ/manage/pro/filemanager.html delete mode 100644 src/static/templ/manage/pro/reinit.html create mode 100644 src/tests/e2e/__init__.py create mode 100644 src/tests/e2e/acct.py create mode 100644 src/tests/e2e/board.py create mode 100644 src/tests/e2e/bulletin.py create mode 100644 src/tests/e2e/chal.py create mode 100644 src/tests/e2e/contest.py create mode 100644 src/tests/e2e/main.py create mode 100644 src/tests/e2e/manage/__init__.py create mode 100644 src/tests/e2e/manage/acct.py create mode 100644 src/tests/e2e/manage/group.py create mode 100644 src/tests/e2e/manage/pack.py create mode 100644 src/tests/e2e/manage/pro/__init__.py create mode 100644 src/tests/e2e/manage/pro/filemanager.py create mode 100644 src/tests/e2e/manage/pro/update.py create mode 100644 src/tests/e2e/manage/pro/updatetests.py create mode 100644 src/tests/e2e/pro.py create mode 100644 src/tests/e2e/proclass.py create mode 100644 src/tests/e2e/proset.py create mode 100644 src/tests/e2e/ques.py create mode 100644 src/tests/e2e/rank.py create mode 100644 src/tests/e2e/submit.py create mode 100644 src/tests/e2e/util.py create mode 100644 src/tests/main.py create mode 100644 src/tests/oj.sql create mode 100755 src/tests/reinit.sh create mode 100644 src/tests/static_file/code/ce.cpp create mode 100644 src/tests/static_file/code/float_checker_ac.cpp create mode 100644 src/tests/static_file/code/float_checker_wa.cpp create mode 100644 src/tests/static_file/code/large.cpp create mode 100644 src/tests/static_file/code/mle.py create mode 100644 src/tests/static_file/code/re.cpp create mode 100644 src/tests/static_file/code/resig.cpp create mode 100644 src/tests/static_file/code/tle.cpp create mode 100644 src/tests/static_file/code/tle.py create mode 100644 src/tests/static_file/code/toj3.ac.py create mode 100644 src/tests/static_file/code/toj3.wa.py create mode 100644 
src/tests/static_file/code/toj659.ac.cpp create mode 100644 src/tests/static_file/code/toj674.ac.cpp create mode 100644 src/tests/static_file/float_checker.tar.xz create mode 100755 src/tests/static_file/float_checker/conf.json create mode 100644 src/tests/static_file/float_checker/http/cont.html create mode 100644 src/tests/static_file/float_checker/pass_all_checker.cpp create mode 100755 src/tests/static_file/float_checker/res/check/build create mode 100755 src/tests/static_file/float_checker/res/check/check.cpp create mode 100755 src/tests/static_file/float_checker/res/testdata/1.in create mode 100755 src/tests/static_file/float_checker/res/testdata/1.out create mode 100755 src/tests/static_file/float_checker/res/testdata/2.in create mode 100755 src/tests/static_file/float_checker/res/testdata/2.out create mode 100755 src/tests/static_file/float_checker/res/testdata/3.in create mode 100755 src/tests/static_file/float_checker/res/testdata/3.out create mode 100755 src/tests/static_file/float_checker/res/testdata/4.in create mode 100755 src/tests/static_file/float_checker/res/testdata/4.out create mode 100755 src/tests/static_file/float_checker/res/testdata/5.in create mode 100755 src/tests/static_file/float_checker/res/testdata/5.out create mode 100644 src/tests/static_file/toj3.tar.xz create mode 100644 src/tests/static_file/toj3/3.in create mode 100644 src/tests/static_file/toj3/3.out create mode 100644 src/tests/static_file/toj3/3.out.incorrect create mode 100644 src/tests/static_file/toj3/conf.json create mode 100644 src/tests/static_file/toj3/http/cont.html create mode 100644 src/tests/static_file/toj3/res/testdata/1.in create mode 100644 src/tests/static_file/toj3/res/testdata/1.out create mode 100644 src/tests/static_file/toj659.tar.xz create mode 100755 src/tests/static_file/toj659/conf.json create mode 100755 src/tests/static_file/toj659/http/cont.pdf create mode 100755 src/tests/static_file/toj659/http/implement.cpp create mode 100755 src/tests/static_file/toj659/http/stub.cpp create mode 100755 src/tests/static_file/toj659/res/make/Makefile create mode 100755 src/tests/static_file/toj659/res/make/stub.cpp create mode 100644 src/tests/static_file/toj674.tar.xz create mode 100755 src/upgrade.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..01baae40 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,69 @@ +name: tests +on: + push: + branches: + - '**' + +permissions: + contents: read + +jobs: + e2etest: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v5 + with: + python-version: 3.12 + + - name: Install Poetry + run: | + curl -sSL https://install.python-poetry.org | python3 - + + - name: Install PostgreSQL, Redis, Dos2Unix + run: | + sudo curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | sudo tee /usr/share/keyrings/postgresql.gpg + echo deb [arch=amd64 signed-by=/usr/share/keyrings/postgresql.gpg] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main | sudo tee /etc/apt/sources.list.d/postgresql.list + sudo apt update -y + sudo apt install -f -y postgresql-16 postgresql-client-16 redis dos2unix + sudo sed -i 's/peer/trust/' /etc/postgresql/16/main/pg_hba.conf + sudo service postgresql start + sudo service redis-server start + + - name: Install Coverage + run: | + $HOME/.local/bin/poetry add --dev coverage + + - name: Install Project dependencies + run: | + sed -i '/^mkdocs-material/d' pyproject.toml # test don't need 
mkdocs-material + rm poetry.lock # we need to remove it because we change the pyproject file + $HOME/.local/bin/poetry install + $HOME/.local/bin/poetry add beautifulsoup4 + $HOME/.local/bin/poetry add requests + + - name: Deploy NTOJ-Judge + run: | + cd $HOME + git clone https://github.com/tobiichi3227/NTOJ-Judge + cd NTOJ-Judge/src + sudo pip3 install tornado cffi --break-system-packages + chmod +x ./runserver.sh + sudo ./runserver.sh > output.log 2>&1 & + + - name: Run e2e test + run: | + cd src + chmod +x runtests.sh + ./runtests.sh + + - name: Output judge log + run: | + cd $HOME/NTOJ-Judge/src + cat output.log + + - name: Upload Coverage Report + uses: actions/upload-artifact@v3 + with: + name: coverage-report + path: src/htmlcov diff --git a/migration/20240920182633_add_challenge_contest_id_index.py b/migration/20240920182633_add_challenge_contest_id_index.py new file mode 100644 index 00000000..e13a5397 --- /dev/null +++ b/migration/20240920182633_add_challenge_contest_id_index.py @@ -0,0 +1,2 @@ +async def dochange(db_conn, rs_conn): + await db_conn.execute('CREATE INDEX challenge_idx_contest_id ON public.challenge USING btree (contest_id)') diff --git a/migration/20240925015623_add_problem_allow_submit.py b/migration/20240925015623_add_problem_allow_submit.py new file mode 100644 index 00000000..1ae706d1 --- /dev/null +++ b/migration/20240925015623_add_problem_allow_submit.py @@ -0,0 +1,2 @@ +async def dochange(db, rs): + await db.execute('ALTER TABLE problem ADD allow_submit boolean DEFAULT true') diff --git a/migration/20240925163422_user_add_motto.py b/migration/20240925163422_user_add_motto.py new file mode 100644 index 00000000..96589fa2 --- /dev/null +++ b/migration/20240925163422_user_add_motto.py @@ -0,0 +1,14 @@ +async def dochange(db, rs): + await db.execute( + ''' + ALTER TABLE account ADD motto character varying DEFAULT ''::character varying + ''' + ) + + result = await db.fetch("SELECT last_value FROM account_acct_id_seq;") + cur_acct_id = int(result[0]["last_value"]) + + for acct_id in range(1, cur_acct_id + 1): + await rs.delete(f"account@{acct_id}") + + await rs.delete("acctlist") diff --git a/migration/20240925173122_add_incremental_challenge_state.py b/migration/20240925173122_add_incremental_challenge_state.py new file mode 100644 index 00000000..d8ea63d0 --- /dev/null +++ b/migration/20240925173122_add_incremental_challenge_state.py @@ -0,0 +1,108 @@ +async def dochange(db, rs): + await db.execute('DROP MATERIALIZED VIEW challenge_state;') + + await db.execute( + ''' + CREATE TABLE challenge_state ( + chal_id integer NOT NULL, + state integer, + runtime bigint DEFAULT 0, + memory bigint DEFAULT 0, + rate integer DEFAULT 0 + ); + ''') + await db.execute( + ''' + ALTER TABLE ONLY public.challenge_state + ADD CONSTRAINT challenge_state_forkey_chal_id FOREIGN KEY (chal_id) REFERENCES public.challenge(chal_id) ON DELETE CASCADE; + ''') + + await db.execute("ALTER TABLE challenge_state ADD CONSTRAINT challenge_state_unique_chal_id UNIQUE(chal_id);") + + + await db.execute( + ''' + CREATE TABLE last_update_time ( + view_name TEXT PRIMARY KEY, + last_update TIMESTAMP WITH TIME ZONE + ); + ''' + ) + + await db.execute("INSERT INTO last_update_time (view_name, last_update) VALUES ('challenge_state', NOW());") + + await db.execute("ALTER TABLE test ADD COLUMN last_modified TIMESTAMP WITH TIME ZONE DEFAULT NOW();") + + await db.execute("CREATE INDEX idx_test_last_modified ON test (last_modified);") + await db.execute("CREATE UNIQUE INDEX ON test_valid_rate (pro_id, 
test_idx);") + + await db.execute( + ''' + CREATE OR REPLACE FUNCTION update_test_last_modified() + RETURNS TRIGGER AS $$ + BEGIN + NEW.last_modified = NOW(); + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + ''') + + await db.execute( + ''' + CREATE TRIGGER test_last_modified_trigger + BEFORE UPDATE ON test + FOR EACH ROW EXECUTE FUNCTION update_test_last_modified(); + ''') + + await db.execute( + ''' + CREATE OR REPLACE FUNCTION refresh_challenge_state_incremental() + RETURNS VOID AS $$ + DECLARE + last_update_time TIMESTAMP WITH TIME ZONE; + BEGIN + SELECT last_update INTO last_update_time + FROM last_update_time + WHERE view_name = 'challenge_state'; + + WITH challenge_summary AS ( + SELECT + t.chal_id, + MAX(t.state) AS max_state, + SUM(t.runtime) AS total_runtime, + SUM(t.memory) AS total_memory, + SUM(CASE WHEN t.state = 1 THEN tvr.rate ELSE 0 END) AS total_rate + FROM test t + LEFT JOIN test_valid_rate tvr ON t.pro_id = tvr.pro_id AND t.test_idx = tvr.test_idx + WHERE t.last_modified > last_update_time + GROUP BY t.chal_id + ), + upsert_result AS ( + INSERT INTO challenge_state (chal_id, state, runtime, memory, rate) + SELECT + chal_id, + max_state, + total_runtime, + total_memory, + total_rate + FROM challenge_summary + ON CONFLICT (chal_id) DO UPDATE + SET + state = EXCLUDED.state, + runtime = EXCLUDED.runtime, + memory = EXCLUDED.memory, + rate = EXCLUDED.rate + WHERE + challenge_state.state != EXCLUDED.state OR + challenge_state.runtime != EXCLUDED.runtime OR + challenge_state.memory != EXCLUDED.memory OR + challenge_state.rate != EXCLUDED.rate + ) + + UPDATE last_update_time + SET last_update = NOW() + WHERE view_name = 'challenge_state'; + END; + $$ LANGUAGE plpgsql; + ''') + await db.execute('SELECT refresh_challenge_state_incremental();') diff --git a/migration/20240926201600_remove_problem_expire_field.py b/migration/20240926201600_remove_problem_expire_field.py new file mode 100644 index 00000000..2dc4c900 --- /dev/null +++ b/migration/20240926201600_remove_problem_expire_field.py @@ -0,0 +1,19 @@ +async def dochange(db, rs): + await db.execute('DROP MATERIALIZED VIEW test_valid_rate;') + await db.execute('ALTER TABLE problem DROP COLUMN expire;') + await db.execute( + ''' + CREATE MATERIALIZED VIEW public.test_valid_rate AS + SELECT test_config.pro_id, + test_config.test_idx, + count(DISTINCT account.acct_id) AS count, + test_config.weight AS rate + FROM (((public.test + JOIN public.account ON ((test.acct_id = account.acct_id))) + JOIN public.problem ON (((((test.pro_id = problem.pro_id)) AND (test.state = 1))))) + RIGHT JOIN public.test_config ON (((test.pro_id = test_config.pro_id) AND (test.test_idx = test_config.test_idx)))) + GROUP BY test_config.pro_id, test_config.test_idx, test_config.weight + WITH NO DATA; + ''') + await db.execute('REFRESH MATERIALIZED VIEW test_valid_rate;') + await rs.delete('prolist') diff --git a/migration/20240930220712_add_testcase.py b/migration/20240930220712_add_testcase.py new file mode 100644 index 00000000..d3804838 --- /dev/null +++ b/migration/20240930220712_add_testcase.py @@ -0,0 +1,12 @@ +import json + +async def dochange(db, rs): + test_configs = await db.fetch('SELECT pro_id, test_idx, metadata FROM test_config;') + + for pro_id, test_group_idx, metadata in test_configs: + metadata = json.loads(metadata) + for i in range(len(metadata["data"])): + metadata["data"][i] = str(metadata["data"][i]) + + await db.execute('UPDATE test_config SET metadata = $1 WHERE pro_id = $2 AND test_idx = $3', + json.dumps(metadata), pro_id, 
test_group_idx) diff --git a/migration/20241001153200_move_test_fields_to_problem.py b/migration/20241001153200_move_test_fields_to_problem.py new file mode 100644 index 00000000..244ff967 --- /dev/null +++ b/migration/20241001153200_move_test_fields_to_problem.py @@ -0,0 +1,49 @@ +import json + +async def dochange(db, rs): + CHECK_TYPES = { + "diff": 0, + "diff-strict": 1, + "diff-float": 2, + "ioredir": 3, + "cms": 4 + } + + await db.execute("ALTER TABLE problem ADD check_type integer DEFAULT 0") + await db.execute("ALTER TABLE problem ADD is_makefile boolean DEFAULT false") + await db.execute(""" + ALTER TABLE problem ADD "limit" jsonb DEFAULT '{"default": {"timelimit": 0, "memlimit":0}}'::jsonb + """) + await db.execute("ALTER TABLE problem ADD chalmeta jsonb DEFAULT '{}'::jsonb") + + res = await db.fetch("SELECT pro_id FROM problem;") + for pro in res: + pro_id = pro['pro_id'] + limit = { + 'default': { + 'timelimit': 0, + 'memlimit': 0, + } + } + f_check_type = 0 + f_is_makefile = False + f_chalmeta = {} + + res = await db.fetch('SELECT check_type, compile_type, chalmeta, timelimit, memlimit FROM test_config WHERE pro_id = $1', pro_id) + for check_type, compile_type, chalmeta, timelimit, memlimit in res: + f_check_type = CHECK_TYPES[check_type] + f_is_makefile = compile_type == 'makefile' + f_chalmeta = json.loads(chalmeta) + limit['default']['timelimit'] = timelimit + limit['default']['memlimit'] = memlimit + + await db.execute("UPDATE problem SET check_type = $1, is_makefile = $2, \"limit\" = $3, chalmeta = $4 WHERE pro_id = $5", + f_check_type, f_is_makefile, json.dumps(limit), json.dumps(f_chalmeta), pro_id) + + + await db.execute('ALTER TABLE test_config DROP COLUMN check_type;') + await db.execute('ALTER TABLE test_config DROP COLUMN score_type;') + await db.execute('ALTER TABLE test_config DROP COLUMN compile_type;') + await db.execute('ALTER TABLE test_config DROP COLUMN chalmeta;') + await db.execute('ALTER TABLE test_config DROP COLUMN timelimit;') + await db.execute('ALTER TABLE test_config DROP COLUMN memlimit;') diff --git a/migration/20241005212800_add_trigger_for_test_deleted.py b/migration/20241005212800_add_trigger_for_test_deleted.py new file mode 100644 index 00000000..8500e105 --- /dev/null +++ b/migration/20241005212800_add_trigger_for_test_deleted.py @@ -0,0 +1,19 @@ +async def dochange(db, rs): + await db.execute( + ''' + CREATE OR REPLACE FUNCTION delete_challenge_state() + RETURNS TRIGGER AS $$ + BEGIN + DELETE FROM challenge_state WHERE chal_id = OLD.chal_id; + RETURN OLD; + END; + $$ LANGUAGE plpgsql; + ''') + + await db.execute( + ''' + CREATE TRIGGER trigger_delete_challenge_state + AFTER DELETE ON test + FOR EACH ROW + EXECUTE FUNCTION delete_challenge_state(); + ''') diff --git a/migration/20241006130410_change_log_param_to_jsonb.py b/migration/20241006130410_change_log_param_to_jsonb.py new file mode 100644 index 00000000..678777c6 --- /dev/null +++ b/migration/20241006130410_change_log_param_to_jsonb.py @@ -0,0 +1,7 @@ +async def dochange(db, rs): + await db.execute( + "ALTER TABLE log ALTER COLUMN params TYPE jsonb USING params::jsonb" + ) + await db.execute( + "ALTER TABLE log ALTER COLUMN params SET DEFAULT '{}'::jsonb" + ) diff --git a/migration/20241006151800_problem_class.py b/migration/20241006151800_problem_class.py new file mode 100644 index 00000000..fb08c4f4 --- /dev/null +++ b/migration/20241006151800_problem_class.py @@ -0,0 +1,22 @@ +class ProClassConst: + OFFICIAL_PUBLIC = 0 + OFFICIAL_HIDDEN = 1 + USER_PUBLIC = 2 + USER_HIDDEN = 3 + + 
+async def dochange(db, rs): + # NOTE: rename + await db.execute("ALTER TABLE pubclass RENAME TO proclass") + await db.execute("ALTER SEQUENCE pubclass_pubclass_id_seq RENAME TO proclass_proclass_id_seq") + await db.execute("ALTER TABLE proclass RENAME COLUMN pubclass_id TO proclass_id") + await db.execute("ALTER TABLE proclass RENAME CONSTRAINT pubclass_pkey TO proclass_pkey") + + await db.execute('''ALTER TABLE proclass ADD "desc" text DEFAULT \'\'''') + await db.execute("ALTER TABLE proclass ADD acct_id integer") + await db.execute('ALTER TABLE proclass ADD "type" integer') + await db.execute( + "ALTER TABLE proclass ADD CONSTRAINT proclass_forkey_acct_id FOREIGN KEY (acct_id) REFERENCES account(acct_id) ON DELETE CASCADE" + ) + await db.execute('UPDATE proclass SET "type" = $1', ProClassConst.OFFICIAL_PUBLIC) + await db.execute('ALTER TABLE proclass ALTER COLUMN "type" SET NOT NULL') diff --git a/migration/20241007235900_add_user_proclass_collection.py b/migration/20241007235900_add_user_proclass_collection.py new file mode 100644 index 00000000..a4d9ad45 --- /dev/null +++ b/migration/20241007235900_add_user_proclass_collection.py @@ -0,0 +1,9 @@ +async def dochange(db, rs): + await db.execute("ALTER TABLE account ADD proclass_collection integer[] NOT NULL DEFAULT '{}'::integer[]") + result = await db.fetch("SELECT last_value FROM account_acct_id_seq;") + cur_acct_id = int(result[0]['last_value']) + + for acct_id in range(1, cur_acct_id + 1): + await rs.delete(f"account@{acct_id}") + + await rs.delete('acctlist') diff --git a/migration/migration.py b/migration/migration.py new file mode 100644 index 00000000..43faae90 --- /dev/null +++ b/migration/migration.py @@ -0,0 +1,72 @@ +import asyncio +import importlib +import inspect +import os +import traceback + +import asyncpg +import config +from redis import asyncio as aioredis + + +async def main(): + db_conn = await asyncpg.connect( + database=config.DBNAME_OJ, user=config.DBUSER_OJ, password=config.DBPW_OJ, host='localhost' + ) + redis_conn = await aioredis.Redis(host='localhost', port=6379, db=config.REDIS_DB) + + db_version = None + result = await db_conn.fetch( + ''' + SELECT EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = 'db_version' + ) AS is_exist; + ''' + ) + result = result[0] + + if not result['is_exist']: + # create version table + await db_conn.execute( + ''' + CREATE TABLE db_version ( + "version" integer + ); + ''' + ) + await db_conn.execute('INSERT INTO db_version ("version") VALUES (0)') + db_version = 0 + + else: + result = await db_conn.fetch("SELECT * FROM db_version") + db_version = int(result[0]['version']) + + migration_files = sorted( + (file for file in os.listdir('./') if file.endswith('.py') and file not in {"migration.py", "config.py"}), + key=lambda filename: filename[:14], # Sort by timestamp (yyyymmddHHMMSS) + ) + + for version, filename in enumerate(migration_files, start=1): + if version <= db_version: + continue + + if filename.endswith('.py'): + module_name = filename[:-3] + module = importlib.import_module(module_name) + + try: + if hasattr(module, 'dochange') and inspect.iscoroutinefunction(module.dochange): + await module.dochange(db_conn, redis_conn) + + except Exception as e: + print(f"Error running migration file {filename}: {e}") + traceback.print_exc() + continue + + await db_conn.execute('UPDATE db_version SET "version"=$1', version) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/scripts/.env.example b/scripts/.env.example index c9173fd2..b1a42b6a 
100644 --- a/scripts/.env.example +++ b/scripts/.env.example @@ -1,4 +1,6 @@ INSTALL_DIR=/srv +PORT=5500 +REDIS_DB=1 DB_NAME=ntoj DB_USERNAME=ntoj DB_PASSWORD=DB_PASSWORD diff --git a/scripts/install.sh b/scripts/install.sh index 891eaa26..5bdc8097 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -10,6 +10,14 @@ if [ -z $INSTALL_DIR ]; then INSTALL_DIR=/srv fi +if [ -z $REDIS_DB ]; then + REDIS_DB=1 +fi + +if [ -z $PORT ]; then + PORT=5500 +fi + if [ -z $DB_NAME ]; then DB_NAME=ntoj fi @@ -111,6 +119,7 @@ sudo systemctl enable --now nginx.service ## Replace nginx root directory path INSTALL_DIR_ESCAPE=$(echo ${INSTALL_DIR} | sed 's/[\/\$]/\\\//g') sed -i "s/INSTALL_DIR/${INSTALL_DIR_ESCAPE}/" ./ntoj.conf +sed -i "s/PORT/${PORT}/" ./ntoj.conf sudo cp ./ntoj.conf /etc/nginx/conf.d/ sudo sed -i "s/www-data/root/" /etc/nginx/nginx.conf sudo rm /etc/nginx/sites-enabled/default @@ -127,6 +136,8 @@ cd ${INSTALL_DIR}/ntoj/ COOKIE_SEC=$(head -c 32 /dev/urandom | xxd -ps -c 128) UNLOCK_PWD=$($HOME/.local/bin/poetry run python3 ${CURRENT_PWD}/get_unlock_pwd.py <<<${UNLOCK_PASSWORD}) cat </dev/null +PORT = '${PORT}' +REDIS_DB = '${REDIS_DB}' DBNAME_OJ = '${DB_NAME}' DBUSER_OJ = '${DB_USERNAME}' DBPW_OJ = '${DB_PASSWORD}' @@ -145,3 +156,9 @@ cp ${INSTALL_DIR}/ntoj/config.py ${CURRENT_PWD}/config.py $HOME/.local/bin/poetry run python3 ${CURRENT_PWD}/add_admin.py ${ADMIN_NAME} ${ADMIN_PASSWORD} ${ADMIN_MAIL} cd ${CURRENT_PWD} rm config.py + +# Run migration +cd ../migration +cp ${INSTALL_DIR}/ntoj/config.py ./config.py +$HOME/.local/bin/poetry -C ${INSTALL_DIR}/ntoj run python3 migration.py +rm config.py diff --git a/scripts/ntoj.conf b/scripts/ntoj.conf index 87f22029..98697e27 100644 --- a/scripts/ntoj.conf +++ b/scripts/ntoj.conf @@ -25,7 +25,7 @@ server { location /oj/be/ { rewrite ^/oj/be/(.*) /$1 break; - proxy_pass http://localhost:5500; + proxy_pass http://localhost:PORT; proxy_read_timeout 14400s; proxy_http_version 1.1; proxy_set_header X-Real-Ip $remote_addr; @@ -35,7 +35,7 @@ server { location ~ ^/oj/pro/(\d+)/(.+) { rewrite ^/oj/pro/(\d+)/(.+) /pro/$1/$2 break; - proxy_pass http://localhost:5500; + proxy_pass http://localhost:PORT; proxy_read_timeout 14400s; proxy_http_version 1.1; proxy_set_header X-Real-Ip $remote_addr; @@ -45,7 +45,7 @@ server { location ~ ^/oj/contests/(\d+)/pro/(\d+)/(.+) { rewrite ^/oj/contests/(\d+)/pro/(\d+)/(.+) /contests/$1/pro/$2/$3 break; - proxy_pass http://localhost:5500; + proxy_pass http://localhost:PORT; proxy_read_timeout 14400s; proxy_http_version 1.1; proxy_set_header X-Real-Ip $remote_addr; diff --git a/src/handlers/acct.py b/src/handlers/acct.py index 20a9168c..2816cfa9 100644 --- a/src/handlers/acct.py +++ b/src/handlers/acct.py @@ -1,11 +1,15 @@ import math import re +import tornado.web + from handlers.base import RequestHandler, reqenv, require_permission from services.log import LogService -from services.pro import ProService +from services.pro import ProService, ProClassService, ProClassConst from services.rate import RateService from services.user import UserConst, UserService +from services.chal import ChalConst +from utils.numeric import parse_list_str class AcctHandler(RequestHandler): @@ -37,23 +41,24 @@ async def get(self, acct_id): prolist2 = [] + ac_pro_cnt = 0 for pro in prolist: pro_id = pro['pro_id'] tmp = {'pro_id': pro_id, 'score': -1} if pro_id in ratemap: tmp['score'] = ratemap[pro_id]['rate'] + ac_pro_cnt += ratemap[pro_id]['state'] == ChalConst.STATE_AC prolist2.append(tmp) - isadmin = self.acct.is_kernel() 
rate_data['rate'] = math.floor(rate_data['rate']) - rate_data['ac_pro_cnt'] = sum(t.get('rate') == 100 for t in ratemap.values()) + rate_data['ac_pro_cnt'] = ac_pro_cnt # force https, add by xiplus, 2018/8/24 acct.photo = re.sub(r'^http://', 'https://', acct.photo) acct.cover = re.sub(r'^http://', 'https://', acct.cover) - await self.render('acct', acct=acct, rate=rate_data, prolist=prolist2, isadmin=isadmin) + await self.render('acct/profile', acct=acct, rate=rate_data, prolist=prolist2) class AcctConfigHandler(RequestHandler): @@ -68,7 +73,7 @@ async def get(self, acct_id=None): self.error(err) return - await self.render('acct-config', acct=acct, isadmin=self.acct.is_kernel()) + await self.render('acct/acct-config', acct=acct) @reqenv @require_permission([UserConst.ACCTTYPE_USER, UserConst.ACCTTYPE_KERNEL]) @@ -79,6 +84,7 @@ async def post(self): name = self.get_argument('name') photo = self.get_argument('photo') cover = self.get_argument('cover') + motto = self.get_argument('motto') target_acct_id = self.get_argument('acct_id') if target_acct_id != str(self.acct.acct_id): @@ -86,7 +92,7 @@ async def post(self): return err, _ = await UserService.inst.update_acct( - self.acct.acct_id, self.acct.acct_type, name, photo, cover + self.acct.acct_id, self.acct.acct_type, name, photo, cover, motto, self.acct.proclass_collection, ) if err: self.error(err) @@ -98,7 +104,11 @@ async def post(self): elif reqtype == 'reset': old = self.get_argument('old') pw = self.get_argument('pw') - target_acct_id = self.get_argument('acct_id') + target_acct_id = int(self.get_argument('acct_id')) + + if not (self.acct.acct_id == target_acct_id or self.acct.is_kernel()): + self.error('Eacces') + return err, _ = await UserService.inst.update_pw(target_acct_id, old, pw, self.acct.is_kernel()) if err: @@ -115,6 +125,128 @@ async def post(self): self.error('Eunk') +class AcctProClassHandler(RequestHandler): + @reqenv + async def get(self, acct_id): + acct_id = int(acct_id) + try: + page = self.get_argument('page') + except tornado.web.HTTPError: + page = None + + if page is None: + _, proclass_list = await ProClassService.inst.get_proclass_list() + proclass_list = filter(lambda proclass: proclass['acct_id'] == self.acct.acct_id, proclass_list) + await self.render('acct/proclass-list', proclass_list=proclass_list) + + elif page == "add": + await self.render('acct/proclass-add', user=self.acct) + + elif page == "update": + proclass_id = int(self.get_argument('proclassid')) + _, proclass = await ProClassService.inst.get_proclass(proclass_id) + if proclass['acct_id'] != self.acct.acct_id: + self.error('Eacces') + return + + await self.render('acct/proclass-update', proclass_id=proclass_id, proclass=proclass) + + @reqenv + async def post(self, acct_id): + reqtype = self.get_argument('reqtype') + acct_id = int(acct_id) + + if reqtype == 'add': + name = self.get_argument('name') + desc = self.get_argument('desc') + proclass_type = int(self.get_argument('type')) + p_list_str = self.get_argument('list') + p_list = parse_list_str(p_list_str) + + if proclass_type not in [ProClassConst.USER_PUBLIC, ProClassConst.USER_HIDDEN]: + self.error('Eparam') + return + + if len(p_list) == 0: + self.error('E') + return + + await LogService.inst.add_log( + f"{self.acct.name} add proclass name={name}", 'user.proclass.add', + { + "list": p_list, + "desc": desc, + "proclass_type": proclass_type, + } + ) + err, proclass_id = await ProClassService.inst.add_proclass(name, p_list, desc, acct_id, proclass_type) + if err: + self.error(err) + return + 
+ self.finish(str(proclass_id)) + + elif reqtype == "update": + proclass_id = int(self.get_argument('proclass_id')) + name = self.get_argument('name') + desc = self.get_argument('desc') + proclass_type = int(self.get_argument('type')) + p_list_str = self.get_argument('list') + p_list = parse_list_str(p_list_str) + + _, proclass = await ProClassService.inst.get_proclass(proclass_id) + + if proclass['acct_id'] != self.acct.acct_id: + await LogService.inst.add_log( + f"{self.acct.name} tried to remove proclass name={proclass['name']}, but this proclass is not owned by them", 'user.proclass.update.failed' + ) + self.error('Eacces') + return + + if proclass_type not in [ProClassConst.USER_PUBLIC, ProClassConst.USER_HIDDEN]: + self.error('Eparam') + return + + if len(p_list) == 0: + self.error('E') + return + + await LogService.inst.add_log( + f"{self.acct.name} update proclass name={name}", 'user.proclass.update', + { + "list": p_list, + "desc": desc, + "proclass_type": proclass_type, + } + ) + err = await ProClassService.inst.update_proclass(proclass_id, name, p_list, desc, proclass_type) + if err: + self.error(err) + return + + self.finish('S') + + elif reqtype == "remove": + proclass_id = int(self.get_argument('proclass_id')) + err, proclass = await ProClassService.inst.get_proclass(proclass_id) + + if err: + self.error(err) + return + + if proclass['acct_id'] != self.acct.acct_id: + await LogService.inst.add_log( + f"{self.acct.name} tried to remove proclass name={proclass['name']}, but this proclass is not owned by them", 'user.proclass.remove.failed' + ) + self.error('Eacces') + return + + await LogService.inst.add_log( + f"{self.acct.name} remove proclass name={proclass['name']}.", 'user.proclass.remove' + ) + await ProClassService.inst.remove_proclass(proclass_id) + + self.finish('S') class SignHandler(RequestHandler): @reqenv diff --git a/src/handlers/base.py b/src/handlers/base.py index c6d21773..dae5440b 100644 --- a/src/handlers/base.py +++ b/src/handlers/base.py @@ -43,19 +43,14 @@ def error(self, err): async def render(self, templ, **kwargs): class _encoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, datetime.datetime): - return obj.isoformat() + def default(self, o): + if isinstance(o, datetime.datetime): + return o.isoformat() else: - return json.JSONEncoder.default(self, obj) + return json.JSONEncoder.default(self, o) - from services.user import UserConst - if not self.acct.is_guest(): - kwargs['acct_id'] = self.acct.acct_id - - else: - kwargs['acct_id'] = '' + kwargs['user'] = self.acct if self.res_json is True: self.finish(json.dumps(kwargs, cls=_encoder)) @@ -83,7 +78,7 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.settings['websocket_ping_interval'] = 10 - def check_origin(self, origin: str) -> bool: + def check_origin(self, _: str) -> bool: return True def on_close(self) -> None: @@ -113,16 +108,31 @@ async def wrap(self, *args, **kwargs): return wrap +GOTO_SIGN=""" + +""" def require_permission(acct_type): def decorator(func): async def wrap(self, *args, **kwargs): if isinstance(acct_type, list): if self.acct.acct_type not in acct_type: + if self.acct.is_guest(): + self.finish(GOTO_SIGN) + return + await self.finish('Eacces') return elif self.acct.acct_type != acct_type: + if self.acct.is_guest(): + self.finish(GOTO_SIGN) + return + await self.finish('Eacces') return diff --git a/src/handlers/bulletin.py b/src/handlers/bulletin.py index 16afe9f2..da0b12b5 100644 --- a/src/handlers/bulletin.py +++ 
b/src/handlers/bulletin.py @@ -1,7 +1,5 @@ import asyncio -from redis import asyncio as aioredis - from handlers.base import RequestHandler, WebSocketSubHandler, reqenv from services.bulletin import BulletinService from services.judge import JudgeServerClusterService diff --git a/src/handlers/chal.py b/src/handlers/chal.py index dcc2c355..667e0576 100644 --- a/src/handlers/chal.py +++ b/src/handlers/chal.py @@ -88,7 +88,6 @@ async def get(self): pageoff=pageoff, ppro_id=ppro_id, pacct_id=pacct_id, - acct=self.acct, chalids=json.dumps(chalids), isadmin=isadmin, ) diff --git a/src/handlers/contests/contests.py b/src/handlers/contests/contests.py index deadfef8..4db7bda7 100644 --- a/src/handlers/contests/contests.py +++ b/src/handlers/contests/contests.py @@ -12,4 +12,4 @@ class ContestListHandler(RequestHandler): @reqenv async def get(self): _, contest_list = await ContestService.inst.get_contest_list() - await self.render('contests/contests-list', contests=contest_list, acct=self.acct) + await self.render('contests/contests-list', contests=contest_list) diff --git a/src/handlers/contests/manage/acct.py b/src/handlers/contests/manage/acct.py index c48ead23..c36c8fd1 100644 --- a/src/handlers/contests/manage/acct.py +++ b/src/handlers/contests/manage/acct.py @@ -34,6 +34,9 @@ async def post(self): changed_list = self.contest.acct_list elif list_type == "admin": changed_list = self.contest.admin_list + else: + self.error('Eparam') + return if reqtype == "add": acct_id = int(acct_id) @@ -111,7 +114,7 @@ async def post(self): acct_list = parse_list_str(acct_id) acct_list = filter(lambda acct_id: self.contest.is_member(acct_id=acct_id), acct_list) - changed_list = filter(lambda acct_id: acct_id not in acct_list, changed_list) + changed_list = list(filter(lambda acct_id: acct_id not in acct_list, changed_list)) # NOTE: Prevent admin remove self if self.acct.acct_id in changed_list and self.contest.is_admin(self.acct): diff --git a/src/handlers/contests/manage/pro.py b/src/handlers/contests/manage/pro.py index 969e02fa..fe6f2f8e 100644 --- a/src/handlers/contests/manage/pro.py +++ b/src/handlers/contests/manage/pro.py @@ -15,7 +15,7 @@ class ContestManageProHandler(RequestHandler): async def get(self): pro_list = [] for pro_id in self.contest.pro_list: - err, pro = await ProService.inst.get_pro(pro_id, is_contest=True) + _, pro = await ProService.inst.get_pro(pro_id, is_contest=True) pro_list.append(pro) await self.render('contests/manage/pro', page='pro', @@ -114,7 +114,6 @@ async def post(self): # TODO: send notify to user async def _rechal(rechals): for chal_id, comp_type in rechals: - file_ext = ChalConst.FILE_EXTENSION[comp_type] _, _ = await ChalService.inst.reset_chal(chal_id) _, _ = await ChalService.inst.emit_chal( chal_id, diff --git a/src/handlers/contests/proset.py b/src/handlers/contests/proset.py index a30de4d9..12b10315 100644 --- a/src/handlers/contests/proset.py +++ b/src/handlers/contests/proset.py @@ -2,7 +2,7 @@ from handlers.base import reqenv, RequestHandler from handlers.contests.base import contest_require_permission -from services.pro import ProService, ProConst +from services.pro import ProService from services.rate import RateService @@ -22,38 +22,16 @@ async def get(self): _, acct_rates = await RateService.inst.map_rate_acct(self.acct, contest_id=self.contest.contest_id) _, prolist = await ProService.inst.list_pro(self.acct, is_contest=True) - # TODO: Move this to services - statemap = {} - async with self.db.acquire() as con: - result = await con.fetch( - f""" - SELECT 
"problem"."pro_id", - MIN("challenge_state"."state") AS "state" - FROM "challenge" - INNER JOIN "challenge_state" - ON "challenge"."chal_id" = "challenge_state"."chal_id" AND "challenge"."acct_id" = $1 AND "challenge"."contest_id" = $2 - INNER JOIN "problem" - ON "challenge"."pro_id" = "problem"."pro_id" - WHERE "problem"."status" <= {ProConst.STATUS_CONTEST} - GROUP BY "problem"."pro_id" - ORDER BY "pro_id" ASC; - """, - self.acct.acct_id, - self.contest.contest_id, - ) - - statemap = {pro_id: state for pro_id, state in result} prolist_order = {pro_id: idx for idx, pro_id in enumerate(self.contest.pro_list)} - prolist = list(sorted(filter(lambda pro: pro['pro_id'] in self.contest.pro_list, prolist), - key=lambda pro: prolist_order[pro['pro_id']])) - for pro in prolist: - pro_id = pro["pro_id"] - pro["state"] = statemap.get(pro_id) + prolist = sorted(filter(lambda pro: pro['pro_id'] in self.contest.pro_list, prolist), + key=lambda pro: prolist_order[pro['pro_id']]) def get_score(pro): pro['score'] = 0 + pro['state'] = None if pro['pro_id'] in acct_rates: pro['score'] += acct_rates[pro['pro_id']]['rate'] + pro['state'] = acct_rates[pro['pro_id']]['state'] return pro diff --git a/src/handlers/contests/scoreboard.py b/src/handlers/contests/scoreboard.py index 027530ca..717c2c59 100644 --- a/src/handlers/contests/scoreboard.py +++ b/src/handlers/contests/scoreboard.py @@ -14,21 +14,21 @@ class _JsonDatetimeEncoder(json.JSONEncoder): - def default(self, obj): - if isinstance(obj, datetime.datetime): - return obj.isoformat() + def default(self, o): + if isinstance(o, datetime.datetime): + return o.isoformat() - elif isinstance(obj, datetime.timedelta): - total_seconds = int(obj.total_seconds()) + elif isinstance(o, datetime.timedelta): + total_seconds = int(o.total_seconds()) minutes = total_seconds // 60 seconds = total_seconds % 60 return f"{minutes}:{seconds:02}" - elif isinstance(obj, Decimal): - return int(obj) + elif isinstance(o, Decimal): + return int(o) else: - return json.JSONEncoder.default(self, obj) + return json.JSONEncoder.default(self, o) class ContestScoreboardHandler(RequestHandler): diff --git a/src/handlers/index.py b/src/handlers/index.py index 72763517..451d2bc8 100644 --- a/src/handlers/index.py +++ b/src/handlers/index.py @@ -12,7 +12,6 @@ async def get(self, page: str): contest_manage = False contest_id = 0 - manage = False reply = False ask_cnt = 0 @@ -28,23 +27,17 @@ async def get(self, page: str): if contest.is_admin(self.acct): contest_manage = True - if self.acct.is_guest(): - name = '' + if self.acct.is_kernel(): + _, _, ask_cnt = await QuestionService.inst.get_asklist() - else: - name = self.acct.name - - if self.acct.is_kernel(): - manage = True - _, _, ask_cnt = await QuestionService.inst.get_asklist() + elif not self.acct.is_guest(): + reply = await QuestionService.inst.have_reply(self.acct.acct_id) - else: - reply = await QuestionService.inst.have_reply(self.acct.acct_id) - - await self.render('index', name=name, manage=manage, ask_cnt=ask_cnt, reply=reply, + await self.render('index', ask_cnt=ask_cnt, reply=reply, is_in_contest=is_in_contest, contest_manage=contest_manage, contest_id=contest_id) + class AbouotHandler(RequestHandler): @reqenv async def get(self): diff --git a/src/handlers/log.py b/src/handlers/log.py index 3be07741..5c3f18d8 100644 --- a/src/handlers/log.py +++ b/src/handlers/log.py @@ -9,29 +9,39 @@ class LogHandler(RequestHandler): @reqenv @require_permission(UserConst.ACCTTYPE_KERNEL) - async def get(self): - try: - pageoff = 
int(self.get_argument('pageoff')) - except tornado.web.HTTPError: - pageoff = 0 - - try: - logtype = str(self.get_argument('logtype')) - except tornado.web.HTTPError: - logtype = None - - err, logtype_list = await LogService.inst.get_log_type() + async def get(self, log_id=None): + if log_id is None: + try: + pageoff = int(self.get_argument('pageoff')) + except tornado.web.HTTPError: + pageoff = 0 + + try: + logtype = str(self.get_argument('logtype')) + except tornado.web.HTTPError: + logtype = None + + err, logtype_list = await LogService.inst.get_log_type() + + err, log = await LogService.inst.list_log(pageoff, 50, logtype) + if err: + self.error(err) + return + + await self.render( + 'loglist', + pageoff=pageoff, + lognum=log['lognum'], + loglist=log['loglist'], + logtype_list=logtype_list, + cur_logtype=logtype, + ) + return - err, log = await LogService.inst.list_log(pageoff, 50, logtype) + err, log = await LogService.inst.view_log(log_id) if err: self.error(err) return - await self.render( - 'loglist', - pageoff=pageoff, - lognum=log['lognum'], - loglist=log['loglist'], - logtype_list=logtype_list, - cur_logtype=logtype, - ) + await self.render('log', log=log) + diff --git a/src/handlers/manage/acct.py b/src/handlers/manage/acct.py index 469aedd7..23a71494 100644 --- a/src/handlers/manage/acct.py +++ b/src/handlers/manage/acct.py @@ -44,7 +44,7 @@ async def post(self, page=None): 'manage.acct.update', ) - err, _ = await UserService.inst.update_acct(acct_id, acct_type, acct.name, acct.photo, acct.cover) + err, _ = await UserService.inst.update_acct(acct_id, acct_type, acct.name, acct.photo, acct.cover, acct.motto, acct.proclass_collection) if err: self.error(err) return diff --git a/src/handlers/manage/board.py b/src/handlers/manage/board.py index 9b3a1209..2f586da0 100644 --- a/src/handlers/manage/board.py +++ b/src/handlers/manage/board.py @@ -51,7 +51,6 @@ async def post(self, page=None): pro_list_str = str(self.get_argument('pro_list')) acct_list_str = str(self.get_argument('acct_list')) - await LogService.inst.add_log(f"{self.acct.name} was added the contest \"{name}\".", 'manage.board.add') err, start = trantime(start) if err: self.error(err) @@ -64,6 +63,17 @@ async def post(self, page=None): acct_list = await self._get_acct_list(acct_list_str) pro_list = self._get_pro_list(pro_list_str) + await LogService.inst.add_log( + f"{self.acct.name} was added to the contest \"{name}\".", 'manage.board.add', + { + "name": name, + "status": status, + "start": start, + "end": end, + "pro_list": pro_list, + "acct_list": acct_list, + } + ) await BoardService.inst.add_board(name, status, start, end, pro_list, acct_list) @@ -75,9 +85,6 @@ async def post(self, page=None): status = int(self.get_argument('status')) start = self.get_argument('start') end = self.get_argument('end') - await LogService.inst.add_log( - f"{self.acct.name} was updated the contest \"{name}\".", 'manage.board.update' - ) err, start = trantime(start) if err: self.error(err) @@ -93,6 +100,17 @@ async def post(self, page=None): acct_list = await self._get_acct_list(acct_list_str) pro_list = self._get_pro_list(pro_list_str) + await LogService.inst.add_log( + f"{self.acct.name} was updated in the contest \"{name}\".", 'manage.board.update', + { + "name": name, + "status": status, + "start": start, + "end": end, + "pro_list": pro_list, + "acct_list": acct_list, + } + ) await BoardService.inst.update_board(board_id, name, status, start, end, pro_list, acct_list) self.finish('S') diff --git a/src/handlers/manage/bulletin.py 
b/src/handlers/manage/bulletin.py index 12aa2074..01d5cd24 100644 --- a/src/handlers/manage/bulletin.py +++ b/src/handlers/manage/bulletin.py @@ -1,5 +1,3 @@ -import tornado.web - from handlers.base import RequestHandler, reqenv, require_permission from services.bulletin import BulletinService from services.log import LogService @@ -43,7 +41,12 @@ async def post(self, page=None): await BulletinService.inst.add_bulletin(title, content, self.acct.acct_id, color, pinned) await LogService.inst.add_log( - f"{self.acct.name} added a line on bulletin: \"{title}\".", 'manage.inform.add' + f"{self.acct.name} added a line on bulletin: \"{title}\".", 'manage.inform.add', + { + "content": content, + "is_pinned": pinned, + "color": color, + } ) await self.finish('S') @@ -63,6 +66,11 @@ async def post(self, page=None): await LogService.inst.add_log( f"{self.acct.name} updated a line on bulletin: \"{title}\" which id is #{bulletin_id}.", 'manage.inform.update', + { + "content": content, + "is_pinned": pinned, + "color": color, + } ) await BulletinService.inst.edit_bulletin(bulletin_id, title, content, self.acct.acct_id, color, pinned) await self.finish('S') diff --git a/src/handlers/manage/group.py b/src/handlers/manage/group.py index e5c827a9..b681284a 100644 --- a/src/handlers/manage/group.py +++ b/src/handlers/manage/group.py @@ -1,4 +1,4 @@ -import tornado +import tornado.web from handlers.base import RequestHandler, reqenv, require_permission from services.group import GroupConst, GroupService diff --git a/src/handlers/manage/judge.py b/src/handlers/manage/judge.py index 4bc17427..6ec3cb4f 100644 --- a/src/handlers/manage/judge.py +++ b/src/handlers/manage/judge.py @@ -1,7 +1,7 @@ import asyncio import base64 -from msgpack import packb, unpackb +from msgpack import packb import config from handlers.base import ( @@ -85,4 +85,4 @@ async def listen_newchal(self): async def open(self): await self.p.subscribe('judgechalcnt_sub') - self.task = asyncio.tasks.Task(self.listen_newchal()) \ No newline at end of file + self.task = asyncio.tasks.Task(self.listen_newchal()) diff --git a/src/handlers/manage/pro.py b/src/handlers/manage/pro.py index b1ed6b1b..5844d44d 100644 --- a/src/handlers/manage/pro.py +++ b/src/handlers/manage/pro.py @@ -2,8 +2,9 @@ import base64 import json import os -import shutil +import tornado.web +import tornado.escape from msgpack import packb, unpackb import config @@ -11,7 +12,7 @@ from services.chal import ChalConst, ChalService from services.judge import JudgeServerClusterService from services.log import LogService -from services.pro import ProService +from services.pro import ProService, ProConst from services.user import UserConst from services.pack import PackService @@ -33,54 +34,98 @@ async def get(self, page=None): pro_id = int(self.get_argument('proid')) err, pro = await ProService.inst.get_pro(pro_id, self.acct) - if err == 'Econf': - self.finish( - ''' - - ''' - ) - return - elif err is not None: + if err: self.error(err) return lock = await self.rs.get(f"{pro['pro_id']}_owner") - testl = [] - for test_idx, test_conf in pro['testm_conf'].items(): - testl.append( - { - 'test_idx': test_idx, - 'timelimit': test_conf['timelimit'], - 'memlimit': test_conf['memlimit'], - 'weight': test_conf['weight'], - 'rate': 2000, - } - ) - await self.render( - 'manage/pro/update', page='pro', pro=pro, lock=lock, testl=testl + 'manage/pro/update', page='pro', pro=pro, lock=lock ) elif page == "add": await self.render('manage/pro/add', page='pro') - elif page == "reinit": + elif page == 
"filemanager": pro_id = int(self.get_argument('proid')) + err, pro = await ProService.inst.get_pro(pro_id, self.acct) - await self.render('manage/pro/reinit', page='pro', pro_id=pro_id) + testm_conf = pro['testm_conf'] + dirs = [] + if testm_conf['is_makefile']: + files = list(sorted(filter(lambda name: os.path.isfile(f'problem/{pro_id}/res/make/{name}'), os.listdir(f'problem/{pro_id}/res/make')))) + dirs.append({ + 'path': 'res/make', + 'files': files, + }) + + if testm_conf['check_type'] in [ProConst.CHECKER_IOREDIR, ProConst.CHECKER_CMS]: + files = list(sorted(filter(lambda name: os.path.isfile(f'problem/{pro_id}/res/check/{name}'), os.listdir(f'problem/{pro_id}/res/check')))) + dirs.append({ + 'path': 'res/check', + 'files': files, + }) + + files = list(sorted(filter(lambda name: os.path.isfile(f'problem/{pro_id}/http/{name}'), os.listdir(f'problem/{pro_id}/http')))) + dirs.append({ + 'path': 'http', + 'files': files, + }) + + await self.render('manage/pro/filemanager', page='pro', pro_id=pro_id, dirs=dirs) elif page == "updatetests": pro_id = int(self.get_argument('proid')) + + try: + download = self.get_argument('download') + except tornado.web.HTTPError: + download = None + + if download: + return NotImplemented + basepath = f'problem/{pro_id}/res/testdata' + filepath = f'{basepath}/{download}' + if not self._is_file_access_safe(basepath, download): + # TODO: log illegal action + self.error('Eacces') + return + + if not os.path.exists(filepath): + self.error('Enoext') + return + + # TODO: log + + self.set_header('Content-Type', 'application/octet-stream') + self.set_header('Content-Disposition', f'attachment; filename="{download}"') + with open(filepath, 'rb') as f: + try: + while True: + buffer = f.read(65536) + if buffer: + self.write(buffer) + else: + self.finish() + return + except: + self.error('Eunk') + + return + + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + files = sorted(set(map(lambda file: file.replace('.in', '').replace('.out', ''), + filter(lambda file: file.endswith('.in') or file.endswith('.out'), os.listdir(f'problem/{pro_id}/res/testdata'))))) - await self.render('manage/pro/updatetests', page='pro', pro_id=pro_id, tests=pro['testm_conf']) + await self.render( + 'manage/pro/updatetests', + page='pro', + pro_id=pro_id, + tests=pro['testm_conf'], + files=files + ) @reqenv @require_permission(UserConst.ACCTTYPE_KERNEL) @@ -90,10 +135,13 @@ async def post(self, page=None): if page == "add" and reqtype == 'addpro': name = self.get_argument('name') status = int(self.get_argument('status')) - expire = None - pack_token = self.get_argument('pack_token') + mode = self.get_argument('mode') - err, pro_id = await ProService.inst.add_pro(name, status, expire, pack_token) + pack_token = None + if mode == "upload": + pack_token = self.get_argument('pack_token') + + err, pro_id = await ProService.inst.add_pro(name, status, pack_token) await LogService.inst.add_log( f"{self.acct.name} had been send a request to add the problem #{pro_id}", 'manage.pro.add.pro' ) @@ -106,19 +154,36 @@ async def post(self, page=None): elif page == "updatetests": if reqtype == "preview": pro_id = int(self.get_argument('pro_id')) - idx = int(self.get_argument('idx')) - type = self.get_argument('type') + filename = self.get_argument('filename') + test_type = self.get_argument('type') - if type not in ["in", "out"]: + if test_type not in ['out', 'in']: self.error('Eparam') return - path = f'problem/{pro_id}/res/testdata/{idx}.{type}' - if not os.path.isfile(path): + filename += 
f".{test_type}" + basepath = f'problem/{pro_id}/res/testdata' + if not self._is_file_access_safe(basepath, filename): + await LogService.inst.add_log( + f'{self.acct.name} tried to preview file:{filename} of the problem #{pro_id}, but it was suspicious', + 'manage.pro.update.tests.preview.failed' + ) + self.error('Eacces') + return + + filepath = os.path.join(basepath, filename) + + if not os.path.exists(filepath): + await LogService.inst.add_log( + f'{self.acct.name} tried to preview file:{filename} of the problem #{pro_id} but not found', + 'manage.pro.update.tests.preview.failed' + ) self.error('Enoext') return - with open(f'problem/{pro_id}/res/testdata/{idx}.{type}', 'r') as testcase_f: + await LogService.inst.add_log(f'{self.acct.name} preview file:{filename} of the problem #{pro_id}', + 'manage.pro.update.tests.preview') + with open(filepath, 'r') as testcase_f: content = testcase_f.readlines() if len(content) > 25: self.error('Efile') @@ -127,77 +192,505 @@ async def post(self, page=None): self.finish(json.dumps(''.join(content))) elif reqtype == "updateweight": - # TODO - return NotImplemented pro_id = int(self.get_argument('pro_id')) group = int(self.get_argument('group')) weight = int(self.get_argument('weight')) err, pro = await ProService.inst.get_pro(pro_id, self.acct) + test_group = pro['testm_conf']['test_group'] + if group not in test_group: + self.error('Enoext') + return + + test_group[group]['weight'] = weight + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) await LogService.inst.add_log( - f"{self.acct.name} had been send a request to add the problem #{pro_id}", 'manage.pro.update.tests' + f'{self.acct.name} had been send a request to update weight of subtask#{group} of the problem #{pro_id}', + 'manage.pro.update.tests.updateweight', + { + 'weight': weight, + } ) + self.finish('S') - elif reqtype == "updatesingletestcase": + elif reqtype == "addtaskgroup": pro_id = int(self.get_argument('pro_id')) - idx = int(self.get_argument('idx')) - test_type = self.get_argument('type') - pack_token = self.get_argument('pack_token') + weight = int(self.get_argument('weight')) + + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + test_group = pro['testm_conf']['test_group'] + + test_group[len(test_group)] = { + 'weight': weight, + 'metadata': {'data': []} + } + + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to add a new subtask of the problem #{pro_id}', + 'manage.pro.update.tests.addtaskgroup', + { + 'weight': weight, + 'test_group_idx': len(test_group) - 1 + } + ) + self.finish('S') + + elif reqtype == 'deletetaskgroup': + pro_id = int(self.get_argument('pro_id')) + group = int(self.get_argument('group')) - path = f'problem/{pro_id}/res/testdata/{idx}.{test_type}' - if not os.path.isfile(path): + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + test_group = pro['testm_conf']['test_group'] + if group not in test_group: self.error('Enoext') return - _ = await PackService.inst.direct_copy(pack_token, path) + test_group.pop(group) + remain_groups = list(test_group.values()) + test_group.clear() + + for group_idx, group in enumerate(remain_groups): + test_group[group_idx] = group + + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) await LogService.inst.add_log( - f"{self.acct.name} had been send a request to update a single testcase of the problem #{pro_id}", - 'manage.pro.update.tests' + f'{self.acct.name} had been send 
a request to delete a subtask of the problem #{pro_id}', + 'manage.pro.update.tests.deletetaskgroup', ) + self.finish('S') + + elif reqtype == 'addsingletestcase': + pro_id = int(self.get_argument('pro_id')) + group = int(self.get_argument('group')) + testcase = self.get_argument('testcase') + + basepath = f'problem/{pro_id}/res/testdata' + if not os.path.exists(f'{basepath}/{testcase}.in') or not os.path.exists(f'{basepath}/{testcase}.out'): + self.error('Enoext') + return + + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + test_group = pro['testm_conf']['test_group'] + if group not in test_group: + self.error('Enoext') + return + + for t in test_group[group]['metadata']['data']: + if testcase == str(t): + await LogService.inst.add_log( + f'{self.acct.name} tried to add testcase:{testcase} for problem #{pro_id} but already exists', + 'manage.pro.update.tests.addsingletestcase', + ) + self.error('Eexist') + return + test_group[group]['metadata']['data'].append(testcase) + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to add a testcase:{testcase} to group#{group} of the problem #{pro_id}', + 'manage.pro.update.tests.addsingletestcase', + ) self.finish('S') - elif reqtype == "deletesingletestcase": + elif reqtype == 'deletesingletestcase': pro_id = int(self.get_argument('pro_id')) - idx = int(self.get_argument('idx')) + group = int(self.get_argument('group')) + testcase = self.get_argument('testcase') - path = f'problem/{pro_id}/res/testdata' - if not os.path.exists(f'{path}/{idx}.in') or not os.path.exists(f'{path}/{idx}.out'): + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + test_group = pro['testm_conf']['test_group'] + if group not in test_group: self.error('Enoext') return + try: + test_group[group]['metadata']['data'].remove(testcase) + except ValueError: + self.error('Enoext') + return + + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) await LogService.inst.add_log( - f"{self.acct.name} had been send a request to delete a single testcase of the problem #{pro_id}", - 'manage.pro.update.tests' + f'{self.acct.name} had been send a request to delete a testcase:{testcase} to group#{group} for problem #{pro_id}', + 'manage.pro.update.tests.deletesingletestcase', ) - os.remove(f'{path}/{idx}.in') - os.remove(f'{path}/{idx}.out') + self.finish('S') + + elif reqtype == 'renamesinglefile': + pro_id = int(self.get_argument('pro_id')) + old_filename = self.get_argument('old_filename') + new_filename = self.get_argument('new_filename') + + # check filename + basepath = f'problem/{pro_id}/res/testdata' + old_inputfile_path = f'{basepath}/{old_filename}.in' + old_outputfile_path = f'{basepath}/{old_filename}.out' + new_inputfile_path = f'{basepath}/{new_filename}.in' + new_outputfile_path = f'{basepath}/{new_filename}.out' + if not self._is_file_access_safe(basepath, f'{old_filename}.in') or not self._is_file_access_safe(basepath, f'{new_filename}.in'): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.tests.renamesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(old_inputfile_path) or not os.path.exists(old_outputfile_path): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id} but {old_filename} not found', + 
'manage.pro.update.tests.renamesinglefile.failed' + ) + self.error('Enoext') + return + + if os.path.exists(new_inputfile_path) or os.path.exists(new_outputfile_path): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id} but {new_filename} already exists', + 'manage.pro.update.tests.renamesinglefile.failed' + ) + self.error('Eexist') + return + + os.rename(old_inputfile_path, new_inputfile_path) + os.rename(old_outputfile_path, new_outputfile_path) err, pro = await ProService.inst.get_pro(pro_id, self.acct) - for test_idx, test_conf in pro['testm_conf'].items(): - tests = test_conf['metadata']['data'] - if tests[0] <= idx <= tests[-1]: - tests.remove(idx) - break + is_modified = False + for test_group in pro['testm_conf']['test_group'].values(): + test = test_group['metadata']['data'] - await ProService.inst.update_testcases(pro_id, pro['testm_conf']) + for i in range(len(test)): + if test[i] == old_filename: + is_modified = True + test[i] = new_filename + if is_modified: + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to rename {old_filename} to {new_filename} for problem #{pro_id}', + 'manage.pro.update.tests.renamesinglefile', + ) self.finish('S') - elif reqtype == "reorder": + elif reqtype == 'updatesinglefile': pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + test_type = self.get_argument('type') + pack_token = self.get_argument('pack_token') + + if test_type not in ['output', 'input']: + PackService.inst.clear(pack_token) + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/res/testdata' + filepath = f'{basepath}/{filename}.{test_type[0:-3]}' + + if not self._is_file_access_safe(basepath, f"{filename}.{test_type[0:-3]}"): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to update {filename} of the problem #{pro_id}, but it was suspicious', + 'manage.pro.update.tests.updatesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(filepath): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to update {filename}.{test_type[0:-3]} for problem #{pro_id} but not found', + 'manage.pro.update.tests.updatesinglefile.failed' + ) + self.error('Enoext') + return + + _ = await PackService.inst.direct_copy(pack_token, filepath) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to update a single file:{filename} of the problem #{pro_id}', + 'manage.pro.update.tests.updatesinglefile', + ) + + self.finish('S') + + elif reqtype == "addsinglefile": + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + input_pack_token = self.get_argument('input_pack_token') + output_pack_token = self.get_argument('output_pack_token') + + basepath = f'problem/{pro_id}/res/testdata' + inputfile_path = f'{basepath}/{filename}.in' + outputfile_path = f'{basepath}/{filename}.out' + + if not self._is_file_access_safe( + basepath, f'{filename}.in' + ) or not self._is_file_access_safe(basepath, f'{filename}.out'): + PackService.inst.clear(input_pack_token) + PackService.inst.clear(output_pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to add a single file:{filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.tests.addsinglefile.failed' + ) + self.error('Eacces') + return + + if 
os.path.exists(inputfile_path) or os.path.exists(outputfile_path): + PackService.inst.clear(input_pack_token) + PackService.inst.clear(output_pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to add single file:{filename} for problem #{pro_id} but {filename} already exists', + 'manage.pro.update.tests.addsinglefile.failed' + ) + self.error('Eexist') + return + + _ = await PackService.inst.direct_copy(input_pack_token, inputfile_path) + _ = await PackService.inst.direct_copy(output_pack_token, outputfile_path) + + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to add a single file:{filename} for problem #{pro_id}', + 'manage.pro.update.tests.addsinglefile', + ) + + self.finish('S') + + elif reqtype == 'deletesinglefile': + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + + basepath = f'problem/{pro_id}/res/testdata' + if not self._is_file_access_safe(basepath, f'{filename}.in'): + await LogService.inst.add_log( + f'{self.acct.name} tried to delete a single file:{filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.tests.deletesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(f'{basepath}/{filename}.in') or not os.path.exists(f'{basepath}/{filename}.out'): + await LogService.inst.add_log( + f'{self.acct.name} tried to delete a single file:{filename} for problem #{pro_id} but not found', + 'manage.pro.update.tests.deletesinglefile.failed' + ) + self.error('Enoext') + return + + os.remove(f'{basepath}/{filename}.in') + os.remove(f'{basepath}/{filename}.out') err, pro = await ProService.inst.get_pro(pro_id, self.acct) - if err: - self.error(err) + for test_group in pro['testm_conf']['test_group'].values(): + test = test_group['metadata']['data'] + + try: + test.remove(filename) + except ValueError: + pass + + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to delete a single file:{filename} of the problem #{pro_id}', + 'manage.pro.update.tests.deletesinglefile', + ) + + self.finish('S') + + elif page == "filemanager": + if reqtype == "preview": + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + basepath = self.get_argument('path') + + if basepath not in ['http', 'res/check', 'res/make']: + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/{basepath}' + if not self._is_file_access_safe(basepath, filename): + await LogService.inst.add_log( + f'{self.acct.name} tried to preview {filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.filemanager.preview.failed' + ) + self.error('Eacces') + return + + filepath = os.path.join(basepath, filename) + + if not os.path.exists(filepath): + await LogService.inst.add_log( + f'{self.acct.name} tried to preview {filename} for problem #{pro_id} but not found', + 'manage.pro.update.filemanager.preview.failed' + ) + self.error('Enoext') + return + + await LogService.inst.add_log(f'{self.acct.name} preview {filename} for problem #{pro_id}', + 'manage.pro.update.filemanager.preview') + with open(filepath, 'r') as f: + try: + content = tornado.escape.xhtml_escape(f.read()) + except UnicodeDecodeError: + self.error('Eunicode') + return + + self.finish(json.dumps(content)) + + elif reqtype == 'renamesinglefile': + pro_id = int(self.get_argument('pro_id')) + old_filename = self.get_argument('old_filename') + new_filename = self.get_argument('new_filename') 
+ basepath = self.get_argument('path') + + if basepath not in ['http', 'res/check', 'res/make']: + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/{basepath}' + old_filepath = f'{basepath}/{old_filename}' + new_filepath = f'{basepath}/{new_filename}' + if not self._is_file_access_safe(basepath, new_filename): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.filemanager.renamesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(old_filepath): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id} but {old_filename} not found', + 'manage.pro.update.filemanager.renamesinglefile.failed' + ) + self.error('Enoext') + return + + if os.path.exists(new_filepath): + await LogService.inst.add_log( + f'{self.acct.name} tried to rename {old_filename} to {new_filename} for problem #{pro_id} but {new_filename} already exists', + 'manage.pro.update.filemanager.renamesinglefile.failed' + ) + self.error('Eexist') + return + + os.rename(old_filepath, new_filepath) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to rename {old_filename} to {new_filename} for problem #{pro_id}', + 'manage.pro.update.filemanager.renamesinglefile', + ) + self.finish('S') + + elif reqtype == 'updatesinglefile': + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + pack_token = self.get_argument('pack_token') + basepath = self.get_argument('path') + + if basepath not in ['http', 'res/check', 'res/make']: + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/{basepath}' + filepath = f'{basepath}/{filename}' + + if not self._is_file_access_safe(basepath, filename): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to update {filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.filemanager.updatesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(filepath): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to update {filename} for problem #{pro_id} but not found', + 'manage.pro.update.filemanager.updatesinglefile.failed' + ) + self.error('Enoext') + return + + _ = await PackService.inst.direct_copy(pack_token, filepath) + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to update {filename} for problem #{pro_id}', + 'manage.pro.update.filemanager.updatesinglefile', + ) + + self.finish('S') + + elif reqtype == 'addsinglefile': + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + pack_token = self.get_argument('pack_token') + basepath = self.get_argument('path') + + if basepath not in ['http', 'res/check', 'res/make']: + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/{basepath}' + filepath = f'{basepath}/{filename}' + + if not self._is_file_access_safe(basepath, filename): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to add {filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.filemanager.addsinglefile.failed' + ) + self.error('Eacces') + return + + if os.path.exists(filepath): + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f'{self.acct.name} tried to add {filename} for problem #{pro_id} but {filename} 
already exists', + 'manage.pro.update.filemanager.addsinglefile.failed' + ) + self.error('Eexist') return - await self._reorder_testcases(pro_id, pro['testm_conf']) - await ProService.inst.update_testcases(pro_id, pro['testm_conf']) + _ = await PackService.inst.direct_copy(pack_token, filepath) await LogService.inst.add_log( - f"{self.acct.name} had been send a request to reorder all testcases of the problem #{pro_id}", - 'manage.pro.update.tests' + f'{self.acct.name} had been send a request to add {filename} for problem #{pro_id}', + 'manage.pro.update.filemanager.addsinglefile', + ) + + self.finish('S') + + elif reqtype == 'deletesinglefile': + pro_id = int(self.get_argument('pro_id')) + filename = self.get_argument('filename') + basepath = self.get_argument('path') + + if basepath not in ['http', 'res/check', 'res/make']: + self.error('Eparam') + return + + basepath = f'problem/{pro_id}/{basepath}' + filepath = f'{basepath}/{filename}' + if not self._is_file_access_safe(basepath, filename): + await LogService.inst.add_log( + f'{self.acct.name} tried to delete {filename} for problem #{pro_id}, but it was suspicious', + 'manage.pro.update.filemanager.deletesinglefile.failed' + ) + self.error('Eacces') + return + + if not os.path.exists(filepath): + await LogService.inst.add_log( + f'{self.acct.name} tried to delete {filename} for problem #{pro_id} but not found', + 'manage.pro.update.filemanager.deletesinglefile.failed' + ) + self.error('Enoext') + return + + os.remove(f'{basepath}/{filename}') + + await LogService.inst.add_log( + f'{self.acct.name} had been send a request to delete {filename} for problem #{pro_id}', + 'manage.pro.update.filemanager.deletesinglefile', ) self.finish('S') @@ -207,19 +700,52 @@ async def post(self, page=None): pro_id = int(self.get_argument('pro_id')) name = self.get_argument('name') status = int(self.get_argument('status')) - expire = None - pack_type = int(self.get_argument('pack_type')) - pack_token = self.get_argument('pack_token') tags = self.get_argument('tags') - - if pack_token == '': - pack_token = None + allow_submit = self.get_argument('allow_submit') == "true" + is_makefile = self.get_argument('is_makefile') == "true" + check_type = int(self.get_argument('check_type')) + + chalmeta = '' + if check_type == ProConst.CHECKER_IOREDIR: + chalmeta = self.get_argument('chalmeta') + try: + chalmeta = json.loads(chalmeta) + except json.JSONDecodeError: + self.error('Econf') + return err, _ = await ProService.inst.update_pro( - pro_id, name, status, expire, pack_type, pack_token, tags + pro_id, name, status, None, None, tags, allow_submit ) + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + old_is_makefile = pro['testm_conf']['is_makefile'] + old_check_type = pro['testm_conf']['check_type'] + custom_check_type = [ProConst.CHECKER_IOREDIR, ProConst.CHECKER_CMS] + if not old_is_makefile and is_makefile: + if not os.path.exists(f'problem/{pro_id}/res/make'): + os.mkdir(f'problem/{pro_id}/res/make') + pro['testm_conf']['is_makefile'] = is_makefile + + if old_check_type not in custom_check_type and check_type in custom_check_type: + if not os.path.exists(f'problem/{pro_id}/res/check'): + os.mkdir(f'problem/{pro_id}/res/check') + pro['testm_conf']['check_type'] = check_type + + if check_type == ProConst.CHECKER_IOREDIR: + chalmeta = json.dumps(chalmeta) + + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) await LogService.inst.add_log( - f"{self.acct.name} had been send a request to update the problem #{pro_id}", 'manage.pro.update.pro' 
+ f"{self.acct.name} had been send a request to update the problem #{pro_id}", 'manage.pro.update.pro', + { + 'name': name, + 'status': status, + 'tags': tags, + 'allow_submit': allow_submit, + 'is_makefile': is_makefile, + 'chalmeta': chalmeta, + 'check_type': check_type, + } ) if err: self.error(err) @@ -227,30 +753,102 @@ async def post(self, page=None): self.finish('S') - elif reqtype == 'reinitpro': + elif reqtype == "uploadpackage": + # TODO: file update need self password verification pro_id = int(self.get_argument('pro_id')) pack_token = self.get_argument('pack_token') - pack_type = ProService.inst.PACKTYPE_FULL - err, _ = await ProService.inst.unpack_pro(pro_id, pack_type, pack_token) + + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + err, _ = await ProService.inst.update_pro( + pro_id, pro['name'], pro['status'], ProService.PACKTYPE_FULL, pack_token, pro['tags'], pro['allow_submit'] + ) + if err: + PackService.inst.clear(pack_token) + await LogService.inst.add_log( + f"{self.acct.name} tried to update the problem #{pro_id} by uploading problem package but failed", + 'manage.pro.update.pro.package.failed', + { + 'err': err + } + ) self.error(err) return + suspicious_files = [] + for file in os.listdir(f"problem/{pro_id}/res/testdata"): + if os.path.islink(file): + suspicious_files.append((file, os.path.realpath(file))) + + if suspicious_files: + await LogService.inst.add_log(f'There are some suspicious files that may have been uploaded by {self.acct.name}', 'manage.pro.update.suspicious', { + 'suspicious_files': suspicious_files, + 'uploader': self.acct.acct_id, + }) + + await LogService.inst.add_log( + f"{self.acct.name} had been send a request to update the problem #{pro_id} by uploading problem package", + 'manage.pro.update.pro.package', + ) + self.finish('S') elif reqtype == 'updatelimit': pro_id = int(self.get_argument('pro_id')) - timelimit = int(self.get_argument('timelimit')) - memlimit = int(self.get_argument('memlimit')) + limits = json.loads(self.get_argument('limits')) + + err, pro = await ProService.inst.get_pro(pro_id, self.acct) + + ALLOW_COMPILERS = ChalConst.ALLOW_COMPILERS + if pro['testm_conf']['is_makefile']: + ALLOW_COMPILERS = ['gcc', 'g++', 'clang', 'clang++', 'default'] + + def _check(comp_type, limit): + if comp_type not in ALLOW_COMPILERS and comp_type != "default": + return False + + if 'timelimit' not in limit: + return False + try: + int(limit['timelimit']) + except ValueError: + return False + + if 'memlimit' not in limit: + return False + + try: + int(limit['memlimit']) + except ValueError: + return False + + return True + + limits = { comp_type:limit for comp_type, limit in limits.items() if _check(comp_type, limit) } + if 'default' not in limits: + self.error('Eparam') + return + + for _, limit in limits.items(): + limit['timelimit'] = int(limit['timelimit']) + limit['memlimit'] = int(limit['memlimit']) * 1024 + + if limit['timelimit'] < 0: + limit['timelimit'] = 0 + + if limit['memlimit'] < 0: + limit['memlimit'] = 0 + + pro['testm_conf']['limit'] = limits + await ProService.inst.update_test_config(pro_id, pro['testm_conf']) - err, _ = await ProService.inst.update_limit(pro_id, timelimit, memlimit) await LogService.inst.add_log( f"{self.acct.name} had been send a request to update the problem #{pro_id}", 'manage.pro.update.limit', + { + 'limits': limits + } ) - if err: - self.error(err) - return self.finish('S') @@ -332,7 +930,6 @@ async def post(self, page=None): # TODO: send notify to user async def _rechal(rechals): for chal_id, 
comp_type in rechals: - file_ext = ChalConst.FILE_EXTENSION[comp_type] _, _ = await ChalService.inst.reset_chal(chal_id) _, _ = await ChalService.inst.emit_chal( chal_id, @@ -346,27 +943,12 @@ async def _rechal(rechals): self.finish('S') - async def _reorder_testcases(self, pro_id, tests): - path = f'problem/{pro_id}/res/testdata' - cnt = 1 - for test_conf in tests.values(): - new_tests = [] - for test in test_conf['metadata']['data']: - new_tests.append(cnt) - if test != cnt: - # order changed - - shutil.move(f'{path}/{test}.in', f'{path}/{cnt}.in.tmp') - shutil.move(f'{path}/{test}.out', f'{path}/{cnt}.out.tmp') - - cnt += 1 - - test_conf['metadata']['data'] = new_tests - - for i in range(1, cnt): - if not os.path.exists(f'{path}/{i}.in.tmp') or not os.path.exists(f'{path}/{i}.out.tmp'): - # order did not change - continue + def _is_file_access_safe(self, basedir, filename): + absolute_basepath = os.path.abspath(basedir) + absolute_filepath = os.path.abspath(os.path.join(basedir, filename)) + if os.path.commonpath([absolute_basepath]) != os.path.commonpath([absolute_basepath, absolute_filepath]): + return False + if os.path.exists(absolute_filepath): + return os.path.isfile(absolute_filepath) and not os.path.islink(absolute_filepath) + return True - shutil.move(f'{path}/{i}.in.tmp', f'{path}/{i}.in') - shutil.move(f'{path}/{i}.out.tmp', f'{path}/{i}.out') diff --git a/src/handlers/manage/proclass.py b/src/handlers/manage/proclass.py index fc6b2dc9..52b10349 100644 --- a/src/handlers/manage/proclass.py +++ b/src/handlers/manage/proclass.py @@ -1,6 +1,6 @@ from handlers.base import RequestHandler, reqenv, require_permission from services.log import LogService -from services.pro import ProClassService +from services.pro import ProClassService, ProClassConst from services.user import UserConst from utils.numeric import parse_list_str @@ -10,17 +10,23 @@ class ManageProClassHandler(RequestHandler): @require_permission(UserConst.ACCTTYPE_KERNEL) async def get(self, page=None): if page is None: - _, pubclass_list = await ProClassService.inst.get_pubclass_list() - await self.render('manage/proclass/proclass-list', page='proclass', pubclass_list=pubclass_list) + _, proclass_list = await ProClassService.inst.get_proclass_list() + proclass_list = filter(lambda proclass: proclass['type'] in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN], + proclass_list) + await self.render('manage/proclass/proclass-list', page='proclass', proclass_list=proclass_list) elif page == "add": await self.render('manage/proclass/add', page='proclass') elif page == "update": - pubclass_id = int(self.get_argument('pubclassid')) - _, pubclass = await ProClassService.inst.get_pubclass(pubclass_id) + proclass_id = int(self.get_argument('proclassid')) + _, proclass = await ProClassService.inst.get_proclass(proclass_id) + if proclass['type'] not in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN]: + self.error('Eacces') + return + + await self.render('manage/proclass/update', page='proclass', proclass_id=proclass_id, proclass=proclass) - await self.render('manage/proclass/update', page='proclass', pubclass_id=pubclass_id, pubclass=pubclass) @reqenv @require_permission(UserConst.ACCTTYPE_KERNEL) @@ -28,37 +34,67 @@ async def post(self, page=None): reqtype = self.get_argument('reqtype') if page == "add" and reqtype == 'add': name = self.get_argument('name') + desc = self.get_argument('desc') + proclass_type = int(self.get_argument('type')) p_list_str = self.get_argument('list') p_list = 
parse_list_str(p_list_str) + if proclass_type not in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN]: + self.error('Eparam') + return + if len(p_list) == 0: self.error('E') return await LogService.inst.add_log( - f"{self.acct.name} add proclass name={name} list={p_list}", 'manage.proclass.add' + f"{self.acct.name} add proclass name={name}", 'manage.proclass.add', + { + "list": p_list, + "desc": desc, + "proclass_type": proclass_type, + } ) - err, pubclass_id = await ProClassService.inst.add_pubclass(name, p_list) + err, proclass_id = await ProClassService.inst.add_proclass(name, p_list, desc, None, proclass_type) if err: self.error(err) return - self.finish(str(pubclass_id)) + self.finish(str(proclass_id)) elif page == "update" and reqtype == "update": - pubclass_id = int(self.get_argument('pubclass_id')) + proclass_id = int(self.get_argument('proclass_id')) name = self.get_argument('name') + desc = self.get_argument('desc') + proclass_type = int(self.get_argument('type')) p_list_str = self.get_argument('list') p_list = parse_list_str(p_list_str) + _, proclass = await ProClassService.inst.get_proclass(proclass_id) + if proclass['type'] not in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN]: + await LogService.inst.add_log( + f"{self.acct.name} tried to update proclass name={proclass['name']}, but an admin cannot modify a user's own proclass", 'manage.proclass.update.failed' + ) + self.error('Eacces') + return + + if proclass_type not in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN]: + self.error('Eparam') + return + if len(p_list) == 0: self.error('E') return await LogService.inst.add_log( - f"{self.acct.name} update proclass name={name} list={p_list}", 'manage.proclass.update' + f"{self.acct.name} update proclass name={name}", 'manage.proclass.update', + { + "list": p_list, + "desc": desc, + "proclass_type": proclass_type, + } ) - err = await ProClassService.inst.update_pubclass(pubclass_id, name, p_list) + err = await ProClassService.inst.update_proclass(proclass_id, name, p_list, desc, proclass_type) if err: self.error(err) return @@ -66,12 +102,23 @@ async def post(self, page=None): self.finish('S') elif page == "update" and reqtype == "remove": - pubclass_id = int(self.get_argument('pubclass_id')) - _, pubclass = await ProClassService.inst.get_pubclass(pubclass_id) + proclass_id = int(self.get_argument('proclass_id')) + err, proclass = await ProClassService.inst.get_proclass(proclass_id) + + if err: + self.error(err) + return + + if proclass['type'] not in [ProClassConst.OFFICIAL_PUBLIC, ProClassConst.OFFICIAL_HIDDEN]: + await LogService.inst.add_log( + f"{self.acct.name} tried to remove proclass name={proclass['name']}, but an admin cannot modify a user's own proclass", 'manage.proclass.remove.failed' + ) + self.error('Eacces') + return await LogService.inst.add_log( - f"{self.acct.name} remove proclass name={pubclass['name']}.", 'manage.proclass.remove' + f"{self.acct.name} remove proclass name={proclass['name']}.", 'manage.proclass.remove' ) - await ProClassService.inst.remove_pubclass(pubclass_id) + await ProClassService.inst.remove_proclass(proclass_id) self.finish('S') diff --git a/src/handlers/manage/question.py b/src/handlers/manage/question.py index 79fb9a00..33aea8a1 100644 --- a/src/handlers/manage/question.py +++ b/src/handlers/manage/question.py @@ -1,4 +1,4 @@ -from msgpack import packb, unpackb +from msgpack import unpackb from handlers.base import RequestHandler, reqenv, require_permission from services.log import 
LogService @@ -33,25 +33,31 @@ async def post(self, page=None): if page == "reply": reqtype = self.get_argument('reqtype') if reqtype == 'rpl': + rtext = self.get_argument('rtext') await LogService.inst.add_log( - f"{self.acct.name} replyed a question from user #{self.get_argument('qacct_id')}:\"{self.get_argument('rtext')}\".", + f"{self.acct.name} replyed a question from user #{self.get_argument('qacct_id')}.", 'manage.question.reply', + { + 'reply_message': rtext + } ) index = self.get_argument('index') - rtext = self.get_argument('rtext') qacct_id = int(self.get_argument('qacct_id')) await QuestionService.inst.reply(qacct_id, index, rtext) self.finish('S') elif reqtype == 'rrpl': + rtext = self.get_argument('rtext') await LogService.inst.add_log( - f"{self.acct.name} re-replyed a question from user #{self.get_argument('qacct_id')}:\"{self.get_argument('rtext')}\".", + f"{self.acct.name} re-replyed a question from user #{self.get_argument('qacct_id')}.", 'manage.question.re-reply', + { + 'reply_message': rtext + } ) index = self.get_argument('index') - rtext = self.get_argument('rtext') qacct_id = int(self.get_argument('qacct_id')) await QuestionService.inst.reply(qacct_id, index, rtext) self.finish('S') diff --git a/src/handlers/pack.py b/src/handlers/pack.py index 47f05437..a18389d5 100644 --- a/src/handlers/pack.py +++ b/src/handlers/pack.py @@ -1,3 +1,4 @@ +import hashlib import json import os import uuid @@ -10,7 +11,7 @@ class PackHandler(WebSocketHandler): STATE_DTAT = 1 CHUNK_MAX = 65536 - def check_origin(self, origin: str) -> bool: + def check_origin(self, _: str) -> bool: # TODO: secure return True @@ -18,6 +19,8 @@ async def open(self): self.state = PackHandler.STATE_HDR self.output = None self.remain = 0 + self.sha1 = hashlib.sha1() + self.received_sha1 = '' async def on_message(self, msg): if self.state == PackHandler.STATE_DTAT: @@ -26,15 +29,22 @@ async def on_message(self, msg): self.write_message('Echunk') self.output.close() self.output = None + os.remove(f'tmp/{self.pack_token}') return self.output.write(msg) self.remain -= size + self.sha1.update(msg) if self.remain == 0: self.output.close() self.output = None + if self.sha1.hexdigest().lower() != self.received_sha1.lower(): + self.write_message('Ehash') + os.remove(f'tmp/{self.pack_token}') + return + self.write_message('S') elif self.state == PackHandler.STATE_HDR: @@ -42,6 +52,7 @@ async def on_message(self, msg): self.pack_token = str(uuid.UUID(hdr['pack_token'])) self.remain = hdr['pack_size'] + self.received_sha1 = hdr['sha-1'] self.output = open(f'tmp/{self.pack_token}', 'wb') self.state = PackHandler.STATE_DTAT diff --git a/src/handlers/pro.py b/src/handlers/pro.py index ffd5f5b2..f7941cd8 100644 --- a/src/handlers/pro.py +++ b/src/handlers/pro.py @@ -1,4 +1,4 @@ -import math +import json import tornado.web @@ -6,9 +6,9 @@ from services.chal import ChalConst from services.judge import JudgeServerClusterService from services.log import LogService -from services.pro import ProClassService, ProConst, ProService +from services.pro import ProClassService, ProClassConst, ProConst, ProService from services.rate import RateService -from services.user import UserConst +from services.user import UserService, UserConst def user_ac_cmp(pro): @@ -69,27 +69,50 @@ async def get(self): } try: - pubclass_id = int(self.get_argument('pubclass_id')) + proclass_id = int(self.get_argument('proclass_id')) except tornado.web.HTTPError: - pubclass_id = None + proclass_id = None - err, prolist = await 
ProService.inst.list_pro(self.acct, state=True) + err, prolist = await ProService.inst.list_pro(self.acct) - _, pubclass_list = await ProClassService.inst.get_pubclass_list() - - pubclass = None - if pubclass_id is not None: - err, pubclass = await ProClassService.inst.get_pubclass(pubclass_id) + proclass = None + if proclass_id is not None: + err, proclass = await ProClassService.inst.get_proclass(proclass_id) if err: self.error(err) return + proclass = dict(proclass) + + if proclass['type'] == ProClassConst.OFFICIAL_HIDDEN and not self.acct.is_kernel(): + self.error('Eacces') + return + elif proclass['type'] == ProClassConst.USER_HIDDEN and proclass['acct_id'] != self.acct.acct_id: + self.error('Eacces') + return - p_list = pubclass['list'] + p_list = proclass['list'] prolist = list(filter(lambda pro: pro['pro_id'] in p_list, prolist)) + if proclass['acct_id']: + _, creator = await UserService.inst.info_acct(proclass['acct_id']) + proclass['creator_name'] = creator.name if show_only_online_pro: prolist = list(filter(lambda pro: pro['status'] == ProConst.STATUS_ONLINE, prolist)) + _, acct_states = await RateService.inst.map_rate_acct(self.acct) + ac_pro_cnt = 0 + def _set_pro_state_and_tags(pro): + nonlocal ac_pro_cnt + pro['state'] = acct_states.get(pro['pro_id'], {}).get('state') + ac_pro_cnt += pro['state'] == ChalConst.STATE_AC + + if (self.acct.is_guest()) or (not self.acct.is_kernel() and pro['state'] != ChalConst.STATE_AC): + pro['tags'] = '' + + return pro + + prolist = list(map(lambda pro: _set_pro_state_and_tags(pro), prolist)) + if problem_show == "onlyac": prolist = list(filter(lambda pro: pro['state'] == ChalConst.STATE_AC, prolist)) @@ -126,15 +149,72 @@ async def get(self): await self.render( 'proset', + user=self.acct, pro_total_cnt=pro_total_cnt, + ac_pro_cnt=ac_pro_cnt, prolist=prolist, - pubclass_list=pubclass_list, - cur_pubclass=pubclass, + cur_proclass=proclass, pageoff=pageoff, flt=flt, - isadmin=self.acct.is_kernel(), ) + @reqenv + async def post(self): + reqtype = self.get_argument('reqtype') + if reqtype == "listproclass": + _, accts = await UserService.inst.list_acct(UserConst.ACCTTYPE_KERNEL) + accts = {acct.acct_id: acct.name for acct in accts} + _, proclass_list = await ProClassService.inst.get_proclass_list() + proclass_list = list(map(dict, proclass_list)) + for proclass in proclass_list: + if proclass['acct_id']: + proclass['creator_name'] = accts[proclass['acct_id']] + + proclass_cata = { + "official": list(filter(lambda proclass: proclass['type'] == ProClassConst.OFFICIAL_PUBLIC, proclass_list)), + "shared": list(filter(lambda proclass: proclass['type'] == ProClassConst.USER_PUBLIC, proclass_list)), + "collection": list(filter(lambda proclass: proclass['proclass_id'] in self.acct.proclass_collection, proclass_list)), + "own": list(filter(lambda proclass: proclass['acct_id'] == self.acct.acct_id, proclass_list)), + } + if self.acct.is_kernel(): + proclass_cata['official'].extend(filter(lambda proclass: proclass['type'] == ProClassConst.OFFICIAL_HIDDEN, proclass_list)) + + self.finish(json.dumps(proclass_cata)) + + elif reqtype == "collect": + if self.acct.is_guest(): + self.error('Eacces') + return + + proclass_id = int(self.get_argument('proclass_id')) + + if proclass_id in self.acct.proclass_collection: + self.error('Eexist') + return + + self.acct.proclass_collection.append(proclass_id) + self.acct.proclass_collection.sort() + await UserService.inst.update_acct(self.acct.acct_id, self.acct.acct_type, self.acct.name, + self.acct.photo, self.acct.cover, 
self.acct.motto, self.acct.proclass_collection) + self.finish('S') + + elif reqtype == "decollect": + if self.acct.is_guest(): + self.error('Eacces') + return + + proclass_id = int(self.get_argument('proclass_id')) + + if proclass_id not in self.acct.proclass_collection: + self.error('Enoext') + return + + self.acct.proclass_collection.remove(proclass_id) + self.acct.proclass_collection.sort() + await UserService.inst.update_acct(self.acct.acct_id, self.acct.acct_type, self.acct.name, + self.acct.photo, self.acct.cover, self.acct.motto, self.acct.proclass_collection) + self.finish('S') + class ProStaticHandler(RequestHandler): @reqenv @@ -167,7 +247,7 @@ async def get(self, pro_id, path): self.set_header('Pragma', 'public') self.set_header('Expires', '0') self.set_header('Cache-Control', 'must-revalidate, post-check=0, pre-check=0') - self.add_header('Content-Type', 'application/pdf') + self.set_header('Content-Type', 'application/pdf') try: download = self.get_argument('download') @@ -213,43 +293,14 @@ async def get(self, pro_id): self.error('Eacces') return - testl = [] - for test_idx, test_conf in pro['testm_conf'].items(): - testl.append( - { - 'test_idx': test_idx, - 'timelimit': test_conf['timelimit'], - 'memlimit': test_conf['memlimit'], - 'weight': test_conf['weight'], - 'rate': 2000, - } - ) - - async with self.db.acquire() as con: - result = await con.fetch( - ''' - SELECT "test_idx", "rate" FROM "test_valid_rate" - WHERE "pro_id" = $1 ORDER BY "test_idx" ASC; - ''', - pro_id, - ) - - countmap = {test_idx: count for test_idx, count in result} - for test in testl: - if test['test_idx'] in countmap: - test['rate'] = math.floor(countmap[test['test_idx']]) - - isguest = self.acct.is_guest() - isadmin = self.acct.is_kernel() - # NOTE: Guest cannot see tags # NOTE: Admin can see tags # NOTE: User get ac can see tags - if isguest or pro['tags'] is None or pro['tags'] == '': + if self.acct.is_guest() or pro['tags'] is None or pro['tags'] == '': pro['tags'] = '' - elif not isadmin: + elif not self.acct.is_kernel(): async with self.db.acquire() as con: result = await con.fetchrow( ''' @@ -274,14 +325,7 @@ async def get(self, pro_id): await self.render( 'pro', - pro={ - 'pro_id': pro['pro_id'], - 'name': pro['name'], - 'status': pro['status'], - 'tags': pro['tags'], - }, - testl=testl, - isadmin=isadmin, + pro=pro, can_submit=can_submit, contest=self.contest ) @@ -305,7 +349,7 @@ async def post(self): ) err, _ = await ProService.inst.update_pro( - pro_id, pro['name'], pro['status'], pro['expire'], '', None, tags + pro_id, pro['name'], pro['status'], '', None, tags, pro['allow_submit'] ) if err: diff --git a/src/handlers/ques.py b/src/handlers/ques.py index 82f39206..7537baf8 100644 --- a/src/handlers/ques.py +++ b/src/handlers/ques.py @@ -15,7 +15,7 @@ async def get(self): return await self.rs.set(f"{self.acct.acct_id}_have_reply", packb(False)) - await self.render('question', acct=self.acct, ques_list=ques_list) + await self.render('question', ques_list=ques_list) @reqenv @require_permission([UserConst.ACCTTYPE_USER]) diff --git a/src/handlers/rank.py b/src/handlers/rank.py index 6a4d7157..33ecd259 100644 --- a/src/handlers/rank.py +++ b/src/handlers/rank.py @@ -3,8 +3,7 @@ import tornado.web from handlers.base import RequestHandler, reqenv -from services.rate import RateService -from services.user import UserConst, UserService +from services.user import UserConst, UserService, Account class ProRankHandler(RequestHandler): @@ -27,29 +26,31 @@ async def get(self, pro_id): async with 
self.db.acquire() as con: result = await con.fetch( - 'SELECT *' - 'FROM (' - 'SELECT DISTINCT ON ("challenge"."acct_id")' - '"challenge"."chal_id",' - '"challenge"."acct_id",' - '"challenge"."timestamp",' - '"account"."name" AS "acct_name",' - '"challenge_state"."runtime",' - '"challenge_state"."memory" ' - 'FROM "challenge" ' - 'INNER JOIN "account" ' - 'ON "challenge"."acct_id"="account"."acct_id" ' - 'LEFT JOIN "challenge_state" ' - 'ON "challenge"."chal_id"="challenge_state"."chal_id" ' - 'WHERE "account"."acct_type">= $1 AND "challenge"."pro_id"= $2 ' - 'AND "challenge_state"."state"=1 ' - 'ORDER BY "challenge"."acct_id" ASC, ' - '"challenge_state"."runtime" ASC, "challenge_state"."memory" ASC,' - '"challenge"."timestamp" ASC, "challenge"."acct_id" ASC' - ') temp ' - 'ORDER BY "runtime" ASC, "memory" ASC,' - '"timestamp" ASC, "acct_id" ASC OFFSET $3 LIMIT $4;', - self.acct.acct_type, + ''' + SELECT * + FROM ( + SELECT DISTINCT ON ("challenge"."acct_id") + "challenge"."chal_id", + "challenge"."acct_id", + "challenge"."timestamp", + "account"."name" AS "acct_name", + "challenge_state"."runtime", + "challenge_state"."memory" + FROM "challenge" + INNER JOIN "account" + ON "challenge"."acct_id"="account"."acct_id" + INNER JOIN "challenge_state" + ON "challenge"."chal_id"="challenge_state"."chal_id" + WHERE "challenge"."pro_id"= $1 + AND "challenge_state"."state"=1 + ORDER BY "challenge"."acct_id" ASC, + "challenge_state"."runtime" ASC, "challenge_state"."memory" ASC, + "challenge"."timestamp" ASC, "challenge"."acct_id" ASC + ) temp + ORDER BY "runtime" ASC, "memory" ASC, + "timestamp" ASC, "acct_id" ASC OFFSET $2 LIMIT $3; + ''' + , pro_id, pageoff, pagenum, @@ -62,12 +63,11 @@ async def get(self, pro_id): SELECT DISTINCT challenge.acct_id FROM challenge INNER JOIN account ON challenge.acct_id=account.acct_id - LEFT JOIN challenge_state ON challenge.chal_id=challenge_state.chal_id - WHERE account.acct_type>=$1 AND challenge.pro_id=$2 + INNER JOIN challenge_state ON challenge.chal_id=challenge_state.chal_id + WHERE challenge.pro_id=$1 AND challenge_state.state=1 ) temp; ''', - self.acct.acct_type, pro_id, ) total_cnt = total_cnt[0]['count'] @@ -106,43 +106,64 @@ async def get(self): except tornado.web.HTTPError: pagenum = 20 - err, acctlist = await UserService.inst.list_acct(UserConst.ACCTTYPE_KERNEL) - if err: - self.error(err) - return - - err, ratemap = await RateService.inst.map_rate() - if err: - self.error(err) - return - - for acct in acctlist: - err, t_acct = await UserService.inst.info_acct(acct.acct_id) - if err: - self.error(err) - return - - err, rate_data = await RateService.inst.get_acct_rate_and_chal_cnt(acct) - if err: - self.error(err) - return - - rate_data['ac_pro_cnt'] = sum(1 for r in ratemap[acct.acct_id].values() if r['rate'] == 100) - acct.rate_data = rate_data - acct.photo = t_acct.photo - - total_cnt = len(acctlist) - acctlist.sort( - key=lambda acct: ( - acct.rate_data['ac_pro_cnt'], - acct.rate_data['ac_cnt'], - acct.rate_data['all_cnt'], - acct.rate_data['rate'], - ), - reverse=True, - ) - acctlist = acctlist[pageoff : pageoff + pagenum] - for rank, acct in enumerate(acctlist): - acct.rank = rank + 1 + pageoff + res = await self.db.fetch( + f''' + WITH user_stats AS ( + SELECT + a.acct_id, + a.name, + a.photo, + a.motto, + COUNT(DISTINCT CASE WHEN cs.state = 1 THEN c.pro_id END) AS ac_problem_count, + SUM(CASE WHEN cs.state = 1 THEN cs.rate ELSE 0 END) AS total_problem_rate, + COUNT(CASE WHEN cs.state = 1 THEN 1 END) AS ac_challenge_count, + COUNT(c.chal_id) AS 
all_challenge_count, + COUNT(CASE WHEN cs.state = 1 THEN 1 END)::float / NULLIF(COUNT(c.chal_id), 0) AS ac_ratio + + FROM + public.challenge c + INNER JOIN + public.challenge_state cs ON c.chal_id = cs.chal_id AND c.contest_id = 0 + INNER JOIN + public.account a ON a.acct_id = c.acct_id + INNER JOIN + public.problem ON c.pro_id = problem.pro_id AND problem.status = 0 + GROUP BY + a.acct_id + ) + SELECT + acct_id, + name, + photo, + motto, + ac_problem_count, + total_problem_rate, + ac_challenge_count, + all_challenge_count, + RANK() OVER (ORDER BY + ac_problem_count DESC, + total_problem_rate DESC, + ac_ratio DESC + ) AS rank + FROM + user_stats + ORDER BY + rank + OFFSET {pageoff} LIMIT {pagenum}; + ''') + + acctlist = [] + for acct_id, name, photo, motto, ac_pro_cnt, total_rate, ac_cnt, all_cnt, rank in res: + acct = Account(acct_id, -1, '', name, photo, '', motto, '', []) + acct.rank = rank + acct.rate_data = { + 'all_cnt': all_cnt, + 'ac_cnt': ac_cnt, + 'ac_pro_cnt': ac_pro_cnt, + } + acctlist.append(acct) + + _, t_acctlist = await UserService.inst.list_acct(UserConst.ACCTTYPE_KERNEL) + total_cnt = len(t_acctlist) await self.render('user-rank', acctlist=acctlist, pageoff=pageoff, pagenum=pagenum, total_cnt=total_cnt) diff --git a/src/handlers/report.py b/src/handlers/report.py index a4520732..ba98bd5c 100644 --- a/src/handlers/report.py +++ b/src/handlers/report.py @@ -8,4 +8,4 @@ class ReportHandler(RequestHandler): async def get(self): chal_id = int(self.get_argument('chal_id')) - await self.render('report-problem', chal_id=chal_id, acct=self.acct) + await self.render('report-problem', chal_id=chal_id) diff --git a/src/handlers/submit.py b/src/handlers/submit.py index 40343257..398a60f7 100644 --- a/src/handlers/submit.py +++ b/src/handlers/submit.py @@ -21,7 +21,6 @@ async def get(self, pro_id=None): pro_id = int(pro_id) - # TODO: if problem is makefile type, we should restrict compiler type allow_compilers = ChalConst.ALLOW_COMPILERS if self.contest: if not self.contest.is_running() and not self.contest.is_admin(self.acct): @@ -50,6 +49,13 @@ async def get(self, pro_id=None): self.error('Eacces') return + if not pro['allow_submit']: + self.error('Eacces') + return + + if pro['testm_conf']['is_makefile']: + allow_compilers = list(filter(lambda compiler: compiler in ['gcc', 'g++', 'clang', 'clang++'], allow_compilers)) + await self.render('submit', pro=pro, allow_compilers=allow_compilers, contest_id=self.contest.contest_id if self.contest else 0) @@ -103,6 +109,10 @@ async def post(self): self.error('Eacces') return + if not pro['allow_submit']: + self.error('Eacces') + return + err, chal_id = await ChalService.inst.add_chal(pro_id, self.acct.acct_id, contest_id, comp_type, code) if err: self.error(err) @@ -118,7 +128,7 @@ async def post(self): chal_id = int(self.get_argument('chal_id')) - err, ret = await ChalService.inst.reset_chal(chal_id) + err, _ = await ChalService.inst.reset_chal(chal_id) err, chal = await ChalService.inst.get_chal(chal_id) pro_id = chal['pro_id'] @@ -144,7 +154,7 @@ async def post(self): return if reqtype == 'submit' and pro['status'] in [ProService.STATUS_ONLINE, ProService.STATUS_CONTEST]: - await self.rs.publish('challist_sub', 1) + await self.rs.publish('challist_sub', str(1)) self.finish(json.dumps(chal_id)) return @@ -168,25 +178,25 @@ async def is_allow_submit(self, code: str, comp_type: str, pro_id: int): return 'Ecomp' should_check_submit_cd = ( - self.contest is None and not self.acct.is_kernel() # not in contest - or - self.contest and 
self.acct.acct_id in self.contest.acct_list # in contest + self.contest is None and not self.acct.is_kernel() # not in contest + or + self.contest and self.acct.acct_id in self.contest.acct_list # in contest ) + name = '' + crc32 = '' if self.contest: name = f'contest_{self.contest.contest_id}_acct_{self.acct.acct_id}_pro_{pro_id}_compiler_{comp_type}' crc32 = str(zlib.crc32(code.encode('utf-8'))) - if await self.rs.sismember(name, crc32): + if (await self.rs.sismember(name, crc32)): return 'Esame' - await self.rs.sadd(name, crc32) - await self.rs.expire(name, time=(self.contest.contest_end - self.contest.contest_start)) - if should_check_submit_cd: last_submit_name = f"last_submit_time_{self.acct.acct_id}" if (last_submit_time := (await self.rs.get(last_submit_name))) is None: - await self.rs.set(last_submit_name, int(time.time()), ex=submit_cd_time) # ex means expire + if submit_cd_time: + await self.rs.set(last_submit_name, int(time.time()), ex=submit_cd_time) # ex means expire else: last_submit_time = int(str(last_submit_time)[2:-1]) @@ -196,4 +206,8 @@ async def is_allow_submit(self, code: str, comp_type: str, pro_id: int): else: await self.rs.set(last_submit_name, int(time.time())) + if self.contest: + await self.rs.sadd(name, crc32) + await self.rs.expire(name, time=(self.contest.contest_end - self.contest.contest_start)) + return None diff --git a/src/runtests.py b/src/runtests.py new file mode 100644 index 00000000..271c9687 --- /dev/null +++ b/src/runtests.py @@ -0,0 +1,193 @@ +import os +import asyncio +import functools +import signal +import time +import subprocess +import multiprocessing + +import asyncpg +import coverage +import tornado.httpserver +import tornado.ioloop +import tornado.log +import tornado.netutil +import tornado.options +import tornado.process +import tornado.web +from redis import asyncio as aioredis + +import config as TestConfig +import url as ur +from services.judge import JudgeServerClusterService +from services.service import services_init +from tests.main import test_main + +MAX_WAIT_SECONDS_BEFORE_SHUTDOWN = 0 + + +def sig_handler(server, db, rs, pool, cov, view_task, sig, frame): + io_loop = tornado.ioloop.IOLoop.current() + + def stop_loop(deadline): + now = time.time() + if now < deadline and io_loop.time: + print("Waiting for next tick") + io_loop.add_timeout(now + 1, stop_loop, deadline) + else: + view_task.kill() + for task in asyncio.all_tasks(): + task.cancel() + + io_loop.run_in_executor(func=db.close, executor=None) + io_loop.run_in_executor(func=rs.aclose, executor=None) + io_loop.run_in_executor(func=pool.aclose, executor=None) + io_loop.run_in_executor( + func=JudgeServerClusterService.inst.disconnect_all_server, executor=None + ) + io_loop.stop() + + print("Shutdown finally") + + def shutdown(): + print("Stopping http server") + server.stop() + cov.stop() + cov.save() + print("Will shutdown in %s seconds ...", MAX_WAIT_SECONDS_BEFORE_SHUTDOWN) + stop_loop(time.time() + MAX_WAIT_SECONDS_BEFORE_SHUTDOWN) + + print("Caught signal: %s" % sig) + io_loop.add_callback_from_signal(shutdown) + + +async def materialized_view_task(): + db = await asyncpg.connect( + database=TestConfig.DBNAME_OJ, + user=TestConfig.DBUSER_OJ, + password=TestConfig.DBPW_OJ, + host="localhost", + ) + rs = await aioredis.Redis(host="localhost", port=6379, db=TestConfig.REDIS_DB) + p = rs.pubsub() + await p.subscribe("materialized_view_req") + + async def _update(): + ret = await rs.incr("materialized_view_counter") - 1 + await db.execute("SELECT 
refresh_challenge_state_incremental();") + return ret + + counter = await _update() + async for msg in p.listen(): + if msg["type"] != "message": + continue + + ind = int(msg["data"]) + if ind <= counter: + continue + + counter = await _update() + + +testing_loop = asyncio.get_event_loop() +if not os.path.exists('db-inited'): + subprocess.run( + [ + "/bin/bash", + "tests/reinit.sh", + TestConfig.DBNAME_OJ, + TestConfig.DBUSER_OJ, + TestConfig.DBPW_OJ, + ] + ) + open('db-inited', 'w').write('1') + +db: asyncpg.Pool = testing_loop.run_until_complete( + asyncpg.create_pool( + database=TestConfig.DBNAME_OJ, + user=TestConfig.DBUSER_OJ, + password=TestConfig.DBPW_OJ, + host="localhost", + loop=testing_loop, + ) +) + +pool = aioredis.ConnectionPool.from_url("redis://localhost", db=TestConfig.REDIS_DB) +rs = aioredis.Redis.from_pool(pool) + +if __name__ == "__main__": + e = multiprocessing.Event() + + def run_materialized_view_task(): + signal.signal(signal.SIGINT, lambda _, __: loop.stop()) + signal.signal(signal.SIGTERM, lambda _, __: loop.stop()) + loop = asyncio.new_event_loop() + try: + loop.run_until_complete(materialized_view_task()) + loop.run_forever() + + finally: + loop.stop() + loop.close() + + view_task_process = multiprocessing.Process(target=run_materialized_view_task) + + def m(event, view_task): + asyncio.set_event_loop(asyncio.new_event_loop()) + cov = coverage.Coverage(data_file=f".coverage.{os.getpid()}", branch=True) + cov.start() + + httpsock = tornado.netutil.bind_sockets(TestConfig.PORT) + + db2: asyncpg.Pool = asyncio.get_event_loop().run_until_complete( + asyncpg.create_pool( + database=TestConfig.DBNAME_OJ, + user=TestConfig.DBUSER_OJ, + password=TestConfig.DBPW_OJ, + host="localhost", + ) + ) + + pool2 = aioredis.ConnectionPool.from_url( + "redis://localhost", db=TestConfig.REDIS_DB + ) + rs2 = aioredis.Redis.from_pool(pool2) + + services_init(db2, rs2) + app = tornado.web.Application( + ur.get_url(db2, rs2, pool2), + autoescape="xhtml_escape", + cookie_secret=TestConfig.COOKIE_SEC, + ) + + httpsrv = tornado.httpserver.HTTPServer(app, xheaders=True) + httpsrv.add_sockets(httpsock) + + tornado.ioloop.IOLoop.current().run_sync(JudgeServerClusterService.inst.start) + + signal.signal( + signal.SIGINT, + functools.partial(sig_handler, httpsrv, db2, rs2, pool2, cov, view_task), + ) + signal.signal( + signal.SIGTERM, + functools.partial(sig_handler, httpsrv, db2, rs2, pool2, cov, view_task), + ) + + try: + event.set() + tornado.ioloop.IOLoop.current().start() + except: + pass + + asyncio.get_event_loop().run_until_complete(rs.flushall()) + view_task_process.start() + main_process = multiprocessing.Process(target=m, args=(e, view_task_process)) + main_process.start() + + while e.wait(): + services_init(db, rs) + test_main(testing_loop) + view_task_process.terminate() + main_process.terminate() + break diff --git a/src/runtests.sh b/src/runtests.sh new file mode 100755 index 00000000..4176e072 --- /dev/null +++ b/src/runtests.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +current_pwd=$(pwd) + +mkdir -p /tmp/ntoj_test_web/oj/problem +cat <config.py +DBNAME_OJ = 'ntoj_unittest_db_name' +DBUSER_OJ = 'ntoj_unittest_db_user' +DBPW_OJ = 'ntoj_unittest_db_password' +REDIS_DB = 2 +PORT = 5501 +COOKIE_SEC = 'ntoj-unittest' +SITE_TITLE = 'ntoj-unittest' +lock_user_list = [] +can_see_code_user = [1] +unlock_pwd = b'vW50b2otdW5pdHRlc3Qtc2VydmVyLXBhc3N3b3Jk' +JUDGE_SERVER_LIST = [ + { + 'name': 'NTOJ_Judge1', + 'url': 'ws://127.0.0.1:2502/judge', + 'problems_path': '${current_pwd}/problem', + 
'codes_path': '${current_pwd}/code', + }, +] + +WEB_PROBLEM_STATIC_FILE_DIRECTORY = '/tmp/ntoj_test_web/oj/problem' +EOF + +cat <.coveragerc +[run] +branch = True +concurrency = thread +parallel = True +omit = + /usr/lib/python3/* + */site-packages/* + */dist-packages/* + *.generated.py + runtests.py + server.py + */e2e/* + upgrade.py +EOF + + +# run migration +cp config.py ../migration/ + +# remove old report record +rm .coverage.* +rm .coverage +rm -r ./htmlcov + +COVERAGE_PROCESS_START=.coveragerc $HOME/.local/bin/poetry run coverage run --branch --source=./ runtests.py +$HOME/.local/bin/poetry run coverage combine +$HOME/.local/bin/poetry run coverage html + +# cleanup +rm config.py +rm db-inited +rm -rf /tmp/ntoj_test_web +rm ../migration/config.py + +if [ "$1" == "web" ]; then + python3 -m http.server 8080 +fi diff --git a/src/server.py b/src/server.py index f7cf48cd..48102a7c 100644 --- a/src/server.py +++ b/src/server.py @@ -36,10 +36,10 @@ def stop_loop(deadline): for task in asyncio.all_tasks(): task.cancel() - io_loop.run_in_executor(func=db.close, executor=None) - io_loop.run_in_executor(func=rs.aclose, executor=None) - io_loop.run_in_executor(func=pool.aclose, executor=None) - io_loop.run_in_executor(func=JudgeServerClusterService.inst.disconnect_all_server, executor=None) + io_loop.add_callback(db.close) + io_loop.add_callback(rs.aclose) + io_loop.add_callback(pool.aclose) + io_loop.add_callback(JudgeServerClusterService.inst.disconnect_all_server) io_loop.stop() print('Shutdown finally') @@ -58,13 +58,13 @@ async def materialized_view_task(): db = await asyncpg.connect( database=config.DBNAME_OJ, user=config.DBUSER_OJ, password=config.DBPW_OJ, host='localhost' ) - rs = await aioredis.Redis(host='localhost', port=6379, db=1) + rs = await aioredis.Redis(host='localhost', port=6379, db=config.REDIS_DB) p = rs.pubsub() await p.subscribe('materialized_view_req') async def _update(): ret = await rs.incr('materialized_view_counter') - 1 - await db.execute('REFRESH MATERIALIZED VIEW challenge_state;') + await db.execute('SELECT refresh_challenge_state_incremental();') return ret counter = await _update() @@ -80,7 +80,7 @@ async def _update(): if __name__ == "__main__": - httpsock = tornado.netutil.bind_sockets(5500) + httpsock = tornado.netutil.bind_sockets(config.PORT) def run_materialized_view_task(): signal.signal(signal.SIGINT, lambda _, __: loop.stop()) @@ -101,7 +101,7 @@ def run_materialized_view_task(): db: asyncpg.Pool = asyncio.get_event_loop().run_until_complete( asyncpg.create_pool(database=config.DBNAME_OJ, user=config.DBUSER_OJ, password=config.DBPW_OJ, host='localhost') ) - pool = aioredis.ConnectionPool.from_url("redis://localhost", db=1) + pool = aioredis.ConnectionPool.from_url("redis://localhost", db=config.REDIS_DB) rs = aioredis.Redis.from_pool(pool) services_init(db, rs) diff --git a/src/services/board.py b/src/services/board.py index 843ab7a3..7ed18de7 100644 --- a/src/services/board.py +++ b/src/services/board.py @@ -1,10 +1,5 @@ import datetime -from msgpack import packb, unpackb - -from services.group import GroupService - - class BoardConst: STATUS_ONLINE = 0 STATUS_HIDDEN = 1 diff --git a/src/services/chal.py b/src/services/chal.py index ff1fc73a..9c58368a 100644 --- a/src/services/chal.py +++ b/src/services/chal.py @@ -1,13 +1,11 @@ from dataclasses import dataclass import datetime -import json import os import config from services.judge import JudgeServerClusterService -from services.log import LogService from services.pro import ProService -from 
services.user import Account, UserConst +from services.user import Account class ChalConst: @@ -115,7 +113,7 @@ def get_sql_query_str(self): if self.contest != 0: query += f' AND "challenge"."contest_id"={self.contest} ' else: - query += f' AND "challenge"."contest_id"=0 ' + query += ' AND "challenge"."contest_id"=0 ' return query @@ -293,6 +291,7 @@ async def get_chal(self, chal_id): ) async def emit_chal(self, chal_id, pro_id, testm_conf, comp_type, pri: int): + from services.pro import ProConst chal_id = int(chal_id) pro_id = int(pro_id) @@ -309,32 +308,36 @@ async def emit_chal(self, chal_id, pro_id, testm_conf, comp_type, pri: int): result = result[0] acct_id, contest_id, timestamp = int(result['acct_id']), int(result['contest_id']), result['timestamp'] + limit = testm_conf['limit'] + + if comp_type in limit: + timelimit = limit[comp_type]['timelimit'] + memlimit = limit[comp_type]['memlimit'] + else: + timelimit = limit['default']['timelimit'] + memlimit = limit['default']['memlimit'] async with self.db.acquire() as con: testl = [] - for test_idx, test_conf in testm_conf.items(): + insert_sql = [] + for test_group_idx, test in testm_conf['test_group'].items(): testl.append( { - 'test_idx': test_idx, - 'timelimit': test_conf['timelimit'], - 'memlimit': test_conf['memlimit'], - 'metadata': test_conf['metadata'], + 'test_idx': test_group_idx, + 'timelimit': timelimit, + 'memlimit': memlimit, + 'metadata': test['metadata'], } ) + insert_sql.append(f'({chal_id}, {acct_id}, {pro_id}, {test_group_idx}, {ChalConst.STATE_JUDGE}, \'{timestamp}\')') - await con.execute( - ''' - INSERT INTO "test" - ("chal_id", "acct_id", "pro_id", "test_idx", "state", "timestamp") - VALUES ($1, $2, $3, $4, $5, $6); - ''', - chal_id, - acct_id, - pro_id, - test_idx, - ChalConst.STATE_JUDGE, - timestamp, - ) + await con.execute( + f''' + INSERT INTO "test" + ("chal_id", "acct_id", "pro_id", "test_idx", "state", "timestamp") VALUES + {','.join(insert_sql)}; + ''' + ) await self.rs.publish('materialized_view_req', (await self.rs.get('materialized_view_counter'))) @@ -346,9 +349,9 @@ async def emit_chal(self, chal_id, pro_id, testm_conf, comp_type, pri: int): await self.rs.publish('materialized_view_req', (await self.rs.get('materialized_view_counter'))) return None, None - chalmeta = test_conf['chalmeta'] + chalmeta = testm_conf['chalmeta'] - if test_conf['comp_type'] == 'makefile': + if testm_conf['is_makefile']: comp_type = 'makefile' await JudgeServerClusterService.inst.send( @@ -360,14 +363,13 @@ async def emit_chal(self, chal_id, pro_id, testm_conf, comp_type, pri: int): 'res_path': f'{pro_id}/res', 'metadata': chalmeta, 'comp_type': comp_type, - 'check_type': test_conf['check_type'], + 'check_type': ProConst.CHECKER_TYPE[testm_conf['check_type']], }, pro_id, contest_id, ) - await self.rs.hdel('rate@kernel_True', str(acct_id)) - await self.rs.hdel('rate@kernel_False', str(acct_id)) + await self.rs.hdel('rate', str(acct_id)) return None, None @@ -376,7 +378,6 @@ async def list_chal(self, off, num, acct: Account, flt: ChalSearchingParam): fltquery = flt.get_sql_query_str() max_status = ProService.inst.get_acct_limit(acct, contest=flt.contest != 0) - min_accttype = min(acct.acct_type, UserConst.ACCTTYPE_USER) async with self.db.acquire() as con: result = await con.fetch( @@ -391,11 +392,11 @@ async def list_chal(self, off, num, acct: Account, flt: ChalSearchingParam): ON "challenge"."pro_id" = "problem"."pro_id" AND "problem"."status" <= {max_status} LEFT JOIN "challenge_state" ON "challenge"."chal_id" = 
"challenge_state"."chal_id" - WHERE "account"."acct_type" >= {min_accttype} + WHERE 1=1 ''' + fltquery + f''' - ORDER BY "challenge"."timestamp" DESC OFFSET {off} LIMIT {num}; + ORDER BY "challenge"."chal_id" DESC OFFSET {off} LIMIT {num}; ''' ) @@ -440,7 +441,6 @@ async def get_single_chal_state_in_list( ): chal_id = int(chal_id) max_status = ProService.inst.get_acct_limit(acct) - min_accttype = min(acct.acct_type, UserConst.ACCTTYPE_USER) async with self.db.acquire() as con: result = await con.fetch( @@ -450,9 +450,8 @@ async def get_single_chal_state_in_list( INNER JOIN "account" ON "challenge"."acct_id" = "account"."acct_id" INNER JOIN "problem" ON "challenge"."pro_id" = "problem"."pro_id" INNER JOIN "challenge_state" ON "challenge"."chal_id" = "challenge_state"."chal_id" - WHERE "account"."acct_type" >= $1 AND "problem"."status" <= $2 AND "challenge_state"."chal_id" = $3; + WHERE "problem"."status" <= $1 AND "challenge_state"."chal_id" = $2; ''', - min_accttype, max_status, chal_id, ) @@ -470,7 +469,6 @@ async def get_single_chal_state_in_list( async def get_stat(self, acct: Account, flt: ChalSearchingParam): fltquery = flt.get_sql_query_str() - min_accttype = min(acct.acct_type, UserConst.ACCTTYPE_USER) async with self.db.acquire() as con: result = await con.fetch( @@ -480,7 +478,7 @@ async def get_stat(self, acct: Account, flt: ChalSearchingParam): 'ON "challenge"."acct_id" = "account"."acct_id" ' 'LEFT JOIN "challenge_state" ' 'ON "challenge"."chal_id"="challenge_state"."chal_id" ' - f'WHERE "account"."acct_type" >= {min_accttype}' + fltquery + ';' + 'WHERE 1=1' + fltquery + ';' ) ) diff --git a/src/services/code.py b/src/services/code.py index 0ef3235f..11c7e00c 100644 --- a/src/services/code.py +++ b/src/services/code.py @@ -37,7 +37,7 @@ async def get_code(self, chal_id: int, query_acct: Account): can_see = True elif contest_id != 0: - err, contest = await ContestService.inst.get_contest(contest_id) + _, contest = await ContestService.inst.get_contest(contest_id) if contest.is_admin(query_acct): can_see = True diff --git a/src/services/contests.py b/src/services/contests.py index 3ef729f9..443f725e 100644 --- a/src/services/contests.py +++ b/src/services/contests.py @@ -59,7 +59,7 @@ def is_running(self): return self.contest_start <= datetime.datetime.now().replace( tzinfo=datetime.timezone(datetime.timedelta(hours=+8))) < self.contest_end - def is_member(self, acct: Account = None, acct_id: int = None): + def is_member(self, acct: Account | None = None, acct_id: int | None = None): if acct is not None: return acct.acct_id in self.acct_list or acct.acct_id in self.admin_list @@ -68,7 +68,7 @@ def is_member(self, acct: Account = None, acct_id: int = None): assert acct is not None and acct_id is not None, 'one of args(acct or acct_id) must not None' - def is_admin(self, acct: Account = None, acct_id: int = None): + def is_admin(self, acct: Account | None = None, acct_id: int | None = None): if acct is not None: return acct.acct_id in self.admin_list diff --git a/src/services/log.py b/src/services/log.py index 87148be8..5fd41bb2 100644 --- a/src/services/log.py +++ b/src/services/log.py @@ -1,6 +1,14 @@ import datetime import json +tz = datetime.timezone(datetime.timedelta(hours=+8)) + +class _Encoder(json.JSONEncoder): + def default(self, o): + if isinstance(o, datetime.datetime): + return o.astimezone(tz).isoformat(timespec="seconds") + + return super().default(o) class LogService: def __init__(self, db, rs) -> None: @@ -10,7 +18,7 @@ def __init__(self, db, rs) -> None: async def 
add_log(self, message, log_type=None, params=None): if isinstance(params, dict): - params = json.dumps(params, ensure_ascii=False) + params = json.dumps(params, ensure_ascii=False, cls=_Encoder) message = str(message) @@ -27,8 +35,26 @@ async def add_log(self, message, log_type=None, params=None): ) return None, result[0]['log_id'] + async def view_log(self, log_id: int): + async with self.db.acquire() as con: + res = await con.fetch('SELECT log_id, message, "timestamp", params FROM log WHERE log_id = $1', int(log_id)) + if len(res) == 0: + return 'Enoext', None + res = res[0] + + params = '{}' + if res['params']: + params = json.dumps(json.loads(res['params']), indent=4) + + return None, { + 'log_id': res['log_id'], + 'message': res['message'], + 'timestamp': res['timestamp'].astimezone(tz).isoformat(timespec="seconds"), + 'params': params + } + + async def list_log(self, off, num, log_type=None): - tz = datetime.timezone(datetime.timedelta(hours=+8)) async with self.db.acquire() as con: if log_type is None: result = await con.fetch( diff --git a/src/services/pack.py b/src/services/pack.py index 30b3cbc1..61b4dac4 100644 --- a/src/services/pack.py +++ b/src/services/pack.py @@ -38,6 +38,10 @@ async def direct_copy(self, pack_token, dst): os.remove(f'tmp/{pack_token}') + def clear(self, pack_token): + if os.path.exists(f'tmp/{pack_token}'): + os.remove(f'tmp/{pack_token}') + async def _run_and_wait_process(self, program, *args): process = await asyncio.create_subprocess_exec(program, *args) returncode = await process.wait() diff --git a/src/services/pro.py b/src/services/pro.py index 318f17ca..2d3622b6 100644 --- a/src/services/pro.py +++ b/src/services/pro.py @@ -1,8 +1,6 @@ -import datetime import json import os import re -from collections import OrderedDict from msgpack import packb, unpackb @@ -20,6 +18,20 @@ class ProConst: STATUS_HIDDEN = 2 STATUS_OFFLINE = 3 + CHECKER_DIFF = 0 + CHECKER_DIFF_STRICT = 1 + CHECKER_DIFF_FLOAT = 2 + CHECKER_IOREDIR = 3 + CHECKER_CMS = 4 + + CHECKER_TYPE = { + CHECKER_DIFF: "diff", + CHECKER_DIFF_STRICT: "diff-strict", + CHECKER_DIFF_FLOAT: "diff-float", + CHECKER_IOREDIR: "ioredir", + CHECKER_CMS: "cms", + } + class ProService: NAME_MIN = 1 @@ -34,12 +46,18 @@ class ProService: PACKTYPE_CONTHTML = 2 PACKTYPE_CONTPDF = 3 + CHECKER_DIFF = 0 + CHECKER_DIFF_STRICT = 1 + CHECKER_DIFF_FLOAT = 2 + CHECKER_IOREDIR = 3 + CHECKER_CMS = 4 + def __init__(self, db, rs): self.db = db self.rs = rs ProService.inst = self - async def get_pro(self, pro_id, acct: Account = None, is_contest: bool = False): + async def get_pro(self, pro_id, acct: Account | None = None, is_contest: bool = False): """ Parameter `is_contest` should be set to true if you want to get contest problems and your account type is not kernel. 
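For orientation, the reworked per-problem configuration that get_pro() builds below and emit_chal() consumes earlier in this diff has roughly the shape sketched here. The compiler key "python3" and every numeric value are illustrative assumptions, not values taken from this patch.

    # Rough shape of the new testm_conf; values are made up for illustration.
    testm_conf = {
        "chalmeta": {},                 # checker metadata, stored on "problem"
        "check_type": 0,                # ProConst.CHECKER_DIFF
        "is_makefile": False,
        "limit": {
            "default": {"timelimit": 1000, "memlimit": 65536},
            "python3": {"timelimit": 3000, "memlimit": 262144},  # hypothetical key
        },
        "test_group": {
            0: {"weight": 20, "metadata": {"data": ["1", "2"]}},
            1: {"weight": 80, "metadata": {"data": ["3", "4"]}},
        },
    }

    # emit_chal() picks the entry for the submission's compiler and falls
    # back to "default" when that compiler has no explicit limit:
    limit = testm_conf["limit"]
    comp_type = "python3"
    chosen = limit[comp_type] if comp_type in limit else limit["default"]
    timelimit, memlimit = chosen["timelimit"], chosen["memlimit"]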
@@ -54,7 +72,8 @@ async def get_pro(self, pro_id, acct: Account = None, is_contest: bool = False): async with self.db.acquire() as con: result = await con.fetch( """ - SELECT "name", "status", "expire", "tags" + SELECT "name", "status", "tags", "allow_submit", + "check_type", "is_makefile", "chalmeta", "limit" FROM "problem" WHERE "pro_id" = $1 AND "status" <= $2; """, pro_id, @@ -64,120 +83,68 @@ async def get_pro(self, pro_id, acct: Account = None, is_contest: bool = False): return "Enoext", None result = result[0] - name, status, expire, tags = ( + name, status, tags, allow_submit, check_type, is_makefile, limit, chalmeta = ( result["name"], result["status"], - result["expire"], result["tags"], + result["allow_submit"], + result["check_type"], + result["is_makefile"], + json.loads(result["limit"]), + json.loads(result["chalmeta"]), ) - if expire == datetime.datetime.max: - expire = None result = await con.fetch( """ - SELECT "test_idx", "compile_type", "score_type", - "check_type", "timelimit", "memlimit", "weight", "metadata", "chalmeta" + SELECT "test_idx", "weight", "metadata" FROM "test_config" WHERE "pro_id" = $1 ORDER BY "test_idx" ASC; """, pro_id, ) - if len(result) == 0: - return "Econf", None - testm_conf = OrderedDict() - for ( - test_idx, - comp_type, - score_type, - check_type, - timelimit, - memlimit, - weight, - metadata, - chalmeta, - ) in result: - testm_conf[test_idx] = { - "comp_type": comp_type, - "score_type": score_type, - "check_type": check_type, - "timelimit": timelimit, - "memlimit": memlimit, + test_groups = {} + for test_group_idx, weight, metadata in result: + test_groups[test_group_idx] = { "weight": weight, - "chalmeta": json.loads(chalmeta), "metadata": json.loads(metadata), } + testm_conf = { + "chalmeta": chalmeta, + "limit": limit, + "check_type": check_type, + "is_makefile": is_makefile, + "test_group": test_groups, + } + return ( None, { "pro_id": pro_id, "name": name, "status": status, - "expire": expire, "testm_conf": testm_conf, "tags": tags, + "allow_submit": allow_submit, }, ) - # TODO: Too many branch - # TODO: Too many local var - # TODO: Too many statement - async def list_pro(self, acct: Account = None, is_contest=False, state=False): - from services.chal import ChalConst - def _mp_encoder(obj): - if isinstance(obj, datetime.datetime): - return obj.astimezone(datetime.timezone.utc).timestamp() - - return obj - + async def list_pro(self, acct: Account = None, is_contest=False): if acct is None: max_status = ProService.STATUS_ONLINE - isguest = True - isadmin = False else: max_status = self.get_acct_limit(acct, contest=is_contest) - isguest = acct.is_guest() - isadmin = acct.is_kernel() - - statemap = {} - if state is True and isguest is False: - async with self.db.acquire() as con: - result = await con.fetch( - """ - SELECT "problem"."pro_id", - MIN("challenge_state"."state") AS "state" - FROM "challenge" - INNER JOIN "challenge_state" - ON "challenge"."chal_id" = "challenge_state"."chal_id" AND "challenge"."acct_id" = $1 - INNER JOIN "problem" - ON "challenge"."pro_id" = "problem"."pro_id" - WHERE "problem"."status" <= $2 - GROUP BY "problem"."pro_id" - ORDER BY "pro_id" ASC; - """, - int(acct.acct_id), - max_status, - ) - - statemap = {pro_id: state for pro_id, state in result} field = f"{max_status}|{[1, 2]}" # TODO: Remove class column on db if (prolist := (await self.rs.hget("prolist", field))) is not None: prolist = unpackb(prolist) - for pro in prolist: - if (expire := pro["expire"]) is not None: - expire = 
datetime.datetime.fromtimestamp(expire) - expire = expire.replace(tzinfo=datetime.timezone(datetime.timedelta(hours=8))) - - pro["expire"] = expire - else: async with self.db.acquire() as con: result = await con.fetch( """ - SELECT "problem"."pro_id", "problem"."name", "problem"."status", "problem"."expire", "problem"."tags" + SELECT "problem"."pro_id", "problem"."name", "problem"."status", "problem"."tags" FROM "problem" WHERE "problem"."status" <= $1 ORDER BY "pro_id" ASC; @@ -186,10 +153,7 @@ def _mp_encoder(obj): ) prolist = [] - for pro_id, name, status, expire, tags in result: - if expire == datetime.datetime.max: - expire = None - + for pro_id, name, status, tags in result: if tags is None: tags = "" @@ -198,78 +162,59 @@ def _mp_encoder(obj): "pro_id": pro_id, "name": name, "status": status, - "expire": expire, "tags": tags, } ) - await self.rs.hset("prolist", field, packb(prolist, default=_mp_encoder)) - - now = datetime.datetime.utcnow() - now = now.replace(tzinfo=datetime.timezone.utc) - - for pro in prolist: - pro_id = pro["pro_id"] - pro["state"] = statemap.get(pro_id) - - if isguest: - pro["tags"] = "" - - elif not isadmin: - if pro["state"] != ChalConst.STATE_AC: - pro["tags"] = "" - - if pro["expire"] is None: - pro["outdate"] = False - - else: - delta = (pro["expire"] - now).total_seconds() - if delta < 0: - pro["outdate"] = True - else: - pro["outdate"] = False + await self.rs.hset("prolist", field, packb(prolist)) return None, prolist - # TODO: Too many args - async def add_pro(self, name, status, expire, pack_token): + async def add_pro(self, name, status, pack_token): name_len = len(name) if name_len < ProService.NAME_MIN: return "Enamemin", None if name_len > ProService.NAME_MAX: return "Enamemax", None - del name_len if status < ProService.STATUS_ONLINE or status > ProService.STATUS_OFFLINE: return "Eparam", None - if expire is None: - expire = datetime.datetime(2099, 12, 31, 0, 0, 0, 0, tzinfo=datetime.timezone.utc) async with self.db.acquire() as con: result = await con.fetch( """ INSERT INTO "problem" - ("name", "status", "expire") - VALUES ($1, $2, $3) RETURNING "pro_id"; + ("name", "status") + VALUES ($1, $2) RETURNING "pro_id"; """, name, status, - expire, ) if len(result) != 1: return "Eunk", None pro_id = int(result[0]["pro_id"]) - _, _ = await self.unpack_pro(pro_id, ProService.PACKTYPE_FULL, pack_token) + if pack_token: + _, _ = await self.unpack_pro(pro_id, ProService.PACKTYPE_FULL, pack_token) + await con.execute("REFRESH MATERIALIZED VIEW test_valid_rate;") - await con.execute("REFRESH MATERIALIZED VIEW test_valid_rate;") + else: + os.mkdir(f"problem/{pro_id}") + os.chmod(os.path.abspath(f"problem/{pro_id}"), 0o755) + os.mkdir(f"problem/{pro_id}/res") + os.mkdir(f"problem/{pro_id}/http") + os.mkdir(f"problem/{pro_id}/res/testdata") + os.symlink( + os.path.abspath(f"problem/{pro_id}/http"), + f"{config.WEB_PROBLEM_STATIC_FILE_DIRECTORY}/{pro_id}", + ) await self.rs.delete("prolist") return None, pro_id # TODO: Too many args - async def update_pro(self, pro_id, name, status, expire, pack_type, pack_token=None, tags=""): + async def update_pro(self, pro_id, name, status, pack_type, pack_token=None, tags="", allow_submit=True): name_len = len(name) if name_len < ProService.NAME_MIN: return "Enamemin", None @@ -281,20 +226,17 @@ async def update_pro(self, pro_id, name, status, expire, pack_type, pack_token=N if tags and not re.match(r"^[a-zA-Z0-9-_, ]+$", tags): return "Etags", None - if expire is None: - expire = datetime.datetime(2099, 12, 31, 0, 0, 0, 0, 
tzinfo=datetime.timezone.utc) - async with self.db.acquire() as con: result = await con.fetch( """ UPDATE "problem" - SET "name" = $1, "status" = $2, "expire" = $3, "tags" = $4 + SET "name" = $1, "status" = $2, "tags" = $3, "allow_submit" = $4 WHERE "pro_id" = $5 RETURNING "pro_id"; """, name, status, - expire, tags, + allow_submit, int(pro_id), ) if len(result) != 1: @@ -311,58 +253,43 @@ async def update_pro(self, pro_id, name, status, expire, pack_type, pack_token=N return None, None - async def update_testcases(self, pro_id, testm_conf): - with open(f'problem/{pro_id}/conf.json', 'r') as f: - conf_json = json.load(f) + async def update_test_config(self, pro_id, testm_conf: dict): + insert_sql = [] + is_makefile = testm_conf['is_makefile'] + check_type = testm_conf['check_type'] + chalmeta = testm_conf['chalmeta'] + limit = testm_conf['limit'] + for test_group_idx, test_group_conf in testm_conf['test_group'].items(): + weight = test_group_conf['weight'] - for test_idx, test_conf in testm_conf.items(): - async with self.db.acquire() as con: - result = await con.fetch( - """ - UPDATE "test_config" - SET "metadata" = $1 - WHERE "pro_id" = $2 AND "test_idx" = $3 RETURNING "pro_id"; - """, - json.dumps(test_conf['metadata']), - int(pro_id), - test_idx - ) - if len(result) == 0: - return "Enoext", None - - conf_json['test'][test_idx]['data'] = test_conf['metadata']['data'] - - with open(f'problem/{pro_id}/conf.json', 'w') as f: - f.write(json.dumps(conf_json)) - - return None, None - - async def update_limit(self, pro_id, timelimit, memlimit): - if timelimit <= 0: - return "Etimelimitmin", None - if memlimit <= 0: - return "Ememlimitmin", None - - memlimit = memlimit * 1024 + sql = '({}, {}, {}, \'{}\')'.format(pro_id, test_group_idx, weight, json.dumps(test_group_conf['metadata'])) + insert_sql.append(sql) async with self.db.acquire() as con: - result = await con.fetch( - """ - UPDATE "test_config" - SET "timelimit" = $1, "memlimit" = $2 - WHERE "pro_id" = $3 RETURNING "pro_id"; - """, - int(timelimit), - int(memlimit), - int(pro_id), + await con.execute('DELETE FROM "test_config" WHERE "pro_id" = $1;', int(pro_id)) + await con.execute( + 'UPDATE "problem" SET is_makefile = $1, check_type = $2, chalmeta = $3, "limit" = $4 WHERE pro_id = $5', + is_makefile, check_type, json.dumps(chalmeta), json.dumps(limit), pro_id ) - if len(result) == 0: - return "Enoext", None + + if insert_sql: + await con.execute( + f""" + INSERT INTO "test_config" + ("pro_id", "test_idx", "weight", "metadata") + VALUES {','.join(insert_sql)}; + """ + ) + + await self.db.execute("REFRESH MATERIALIZED VIEW test_valid_rate;") + await self.rs.delete('rate') + await self.rs.hdel('pro_rate', pro_id) + await self.rs.publish('materialized_view_req', (await self.rs.get('materialized_view_counter'))) return None, None # TODO: 把這破函數命名改一下 - def get_acct_limit(self, acct: Account = None, contest=False): + def get_acct_limit(self, acct: Account | None = None, contest=False): if contest: return ProService.STATUS_CONTEST @@ -376,6 +303,7 @@ def get_acct_limit(self, acct: Account = None, contest=False): return ProService.STATUS_ONLINE async def unpack_pro(self, pro_id, pack_type, pack_token): + from services.chal import ChalConst def _clean_cont(prefix): try: os.remove(f"{prefix}cont.html") @@ -413,7 +341,6 @@ def _clean_cont(prefix): try: os.chmod(os.path.abspath(f"problem/{pro_id}"), 0o755) - # INFO: 正式上線請到config.py修改成正確路徑 os.symlink( os.path.abspath(f"problem/{pro_id}/http"), f"{config.WEB_PROBLEM_STATIC_FILE_DIRECTORY}/{pro_id}", 
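The batched test_config insert in update_test_config() above builds one multi-row VALUES statement by string formatting. Purely as a hedged alternative sketch (not what the patch itself does), the same batching could be expressed with asyncpg's parameterized executemany; this assumes it runs inside update_test_config() with pro_id and testm_conf in scope:

    # Sketch only: parameterized batch insert of the test groups.
    rows = [
        (int(pro_id), int(test_idx), group["weight"], json.dumps(group["metadata"]))
        for test_idx, group in testm_conf["test_group"].items()
    ]
    async with self.db.acquire() as con:
        await con.executemany(
            'INSERT INTO "test_config" ("pro_id", "test_idx", "weight", "metadata") '
            'VALUES ($1, $2, $3, $4);',
            rows,
        )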
@@ -428,40 +355,68 @@ def _clean_cont(prefix): except json.decoder.JSONDecodeError: return "Econf", None - comp_type = conf["compile"] - score_type = conf["score"] - check_type = conf["check"] - timelimit = conf["timelimit"] - memlimit = conf["memlimit"] * 1024 + is_makefile = conf["compile"] == 'makefile' + check_type = self._get_check_type(conf["check"]) chalmeta = conf["metadata"] # INFO: ioredir data + ALLOW_COMPILERS = list(ChalConst.ALLOW_COMPILERS) + ['default'] + if is_makefile: + ALLOW_COMPILERS = ['default', 'gcc', 'g++', 'clang', 'clang++'] + + if "limit" in conf: + limit = {lang: lim for lang, lim in conf["limit"].items() if lang in ALLOW_COMPILERS} + else: + limit = { + 'default': { + 'timelimit': conf["timelimit"], + 'memlimit': conf["memlimit"] * 1024 + } + } + async with self.db.acquire() as con: await con.execute('DELETE FROM "test_config" WHERE "pro_id" = $1;', int(pro_id)) + await con.execute( + 'UPDATE "problem" SET is_makefile = $1, check_type = $2, chalmeta = $3, "limit" = $4 WHERE pro_id = $5', + is_makefile, check_type, json.dumps(chalmeta), json.dumps(limit), pro_id + ) + + insert_sql = [] for test_idx, test_conf in enumerate(conf["test"]): + for i in range(len(test_conf["data"])): + test_conf["data"][i] = str(test_conf["data"][i]) + metadata = {"data": test_conf["data"]} + insert_sql.append(f"({pro_id}, {test_idx}, {test_conf['weight']}, \'{json.dumps(metadata)}\')") + + await con.execute( + f""" + INSERT INTO "test_config" + ("pro_id", "test_idx", "weight", "metadata") + VALUES {",".join(insert_sql)} + """ + ) - await con.execute( - """ - INSERT INTO "test_config" - ("pro_id", "test_idx", "compile_type", "score_type", "check_type", - "timelimit", "memlimit", "weight", "metadata", "chalmeta") - VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10); - """, - int(pro_id), - int(test_idx), - comp_type, - score_type, - check_type, - int(timelimit), - int(memlimit), - int(test_conf["weight"]), - json.dumps(metadata), - json.dumps(chalmeta), - ) return None, None + def _get_check_type(self, s: str): + if s == "diff": + return ProConst.CHECKER_DIFF + elif s == "diff-strict": + return ProConst.CHECKER_DIFF_STRICT + elif s == "diff-float": + return ProConst.CHECKER_DIFF_FLOAT + elif s == "ioredir": + return ProConst.CHECKER_IOREDIR + elif s == "cms": + return ProConst.CHECKER_CMS + +class ProClassConst: + OFFICIAL_PUBLIC = 0 + OFFICIAL_HIDDEN = 1 + USER_PUBLIC = 2 + USER_HIDDEN = 3 class ProClassService: def __init__(self, db, rs): @@ -469,11 +424,11 @@ def __init__(self, db, rs): self.rs = rs ProClassService.inst = self - async def get_pubclass(self, pubclass_id): + async def get_proclass(self, proclass_id): async with self.db.acquire() as con: res = await con.fetch( - 'SELECT "pubclass_id", "name", "list" FROM "pubclass" WHERE "pubclass_id" = $1;', - int(pubclass_id), + 'SELECT "proclass_id", "name", "desc", "list", "acct_id", "type" FROM "proclass" WHERE "proclass_id" = $1 ORDER BY "proclass_id" ASC;', + int(proclass_id), ) if len(res) != 1: @@ -481,42 +436,40 @@ async def get_pubclass(self, pubclass_id): return None, res[0] - async def get_pubclass_list(self): + async def get_proclass_list(self): async with self.db.acquire() as con: - res = await con.fetch('SELECT "pubclass_id", "name" FROM "pubclass";') + res = await con.fetch('SELECT "proclass_id", "name", "acct_id", "type" FROM "proclass";') return None, res - async def add_pubclass(self, pubclass_name, p_list): + async def add_proclass(self, name, p_list, desc, acct_id, proclass_type): async with self.db.acquire() as 
con: res = await con.fetchrow( """ - INSERT INTO "pubclass" ("name", "list") - VALUES ($1, $2) RETURNING "pubclass_id"; + INSERT INTO "proclass" ("name", "list", "desc", "acct_id", "type") + VALUES ($1, $2, $3, $4, $5) RETURNING "proclass_id"; """, - pubclass_name, + name, p_list, + desc, + acct_id, + proclass_type, ) return None, res[0] - async def remove_pubclass(self, pubclass_id): + async def remove_proclass(self, proclass_id): async with self.db.acquire() as con: - await con.execute('DELETE FROM "pubclass" WHERE "pubclass_id" = $1', int(pubclass_id)) + await con.execute('DELETE FROM "proclass" WHERE "proclass_id" = $1', int(proclass_id)) - async def update_pubclass(self, pubclass_id, pubclass_name, p_list): - pubclass_id = int(pubclass_id) + async def update_proclass(self, proclass_id, name, p_list, desc, proclass_type): + proclass_id = int(proclass_id) async with self.db.acquire() as con: await con.execute( - 'UPDATE "pubclass" SET "name" = $1, "list" = $2 WHERE "pubclass_id" = $3', - pubclass_name, + 'UPDATE "proclass" SET "name" = $1, "list" = $2, "desc" = $3, "type" = $4 WHERE "proclass_id" = $5', + name, p_list, - pubclass_id, + desc, + proclass_type, + proclass_id, ) - - async def get_priclass(self, acct_id): - pass - - async def get_priclass_list(self, acct_id): - pass - diff --git a/src/services/rate.py b/src/services/rate.py index ba69b739..e1bd5232 100644 --- a/src/services/rate.py +++ b/src/services/rate.py @@ -14,8 +14,7 @@ def __init__(self, db, rs) -> None: RateService.inst = self async def get_acct_rate_and_chal_cnt(self, acct: Account): - kernel = acct.is_kernel() - key = f'rate@kernel_{kernel}' + key = 'rate' acct_id = acct.acct_id if (rate_data := await self.rs.hget(key, acct_id)) is None: @@ -37,31 +36,24 @@ async def get_acct_rate_and_chal_cnt(self, acct: Account): ac_chal_cnt = ac_chal_cnt['count'] result = await con.fetch( - ( - 'SELECT ' - 'SUM("test_valid_rate"."rate" * ' - ' CASE WHEN "valid_test"."timestamp" < "valid_test"."expire" ' - ' THEN 1 ELSE ' - ' (1 - (GREATEST(date_part(\'days\',justify_interval(' - ' age("valid_test"."timestamp","valid_test"."expire") ' - ' + \'1 days\')),-1)) * 0.15) ' - ' END) ' - 'AS "rate" FROM "test_valid_rate" ' - 'INNER JOIN (' - ' SELECT "test"."pro_id","test"."test_idx",' - ' MIN("test"."timestamp") AS "timestamp","problem"."expire" ' - ' FROM "test" ' - ' INNER JOIN "account" ' - ' ON "test"."acct_id" = "account"."acct_id" ' - ' INNER JOIN "problem" ' - ' ON "test"."pro_id" = "problem"."pro_id" ' - ' WHERE "account"."acct_id" = $1 ' - ' AND "test"."state" = $2 ' - ' GROUP BY "test"."pro_id","test"."test_idx","problem"."expire"' - ') AS "valid_test" ' - 'ON "test_valid_rate"."pro_id" = "valid_test"."pro_id" ' - 'AND "test_valid_rate"."test_idx" = "valid_test"."test_idx";' - ), + ''' + SELECT + SUM("test_valid_rate"."rate") AS "rate" FROM "test_valid_rate" + INNER JOIN ( + SELECT "test"."pro_id","test"."test_idx", + MIN("test"."timestamp") AS "timestamp" + FROM "test" + INNER JOIN "account" + ON "test"."acct_id" = "account"."acct_id" + INNER JOIN "problem" + ON "test"."pro_id" = "problem"."pro_id" + WHERE "account"."acct_id" = $1 + AND "test"."state" = $2 + GROUP BY "test"."pro_id","test"."test_idx" + ) AS "valid_test" + ON "test_valid_rate"."pro_id" = "valid_test"."pro_id" + AND "test_valid_rate"."test_idx" = "valid_test"."test_idx"; + ''', acct_id, int(ChalConst.STATE_AC), ) @@ -145,22 +137,31 @@ async def map_rate_acct( self, acct: Account, contest_id: int = 0, starttime='1970-01-01 00:00:00.000', endtime='2100-01-01 
00:00:00.000' ): + from services.pro import ProConst if isinstance(starttime, str): starttime = datetime.datetime.fromisoformat(starttime) if isinstance(endtime, str): endtime = datetime.datetime.fromisoformat(endtime) + problem_status_sql = '' + if contest_id != 0: + problem_status_sql = f'AND "problem"."status" = {ProConst.STATUS_CONTEST}' + elif acct.is_kernel(): + problem_status_sql = f'AND "problem"."status" <= {ProConst.STATUS_HIDDEN} AND "problem"."status" != {ProConst.STATUS_CONTEST}' + else: + problem_status_sql = f'AND "problem"."status" <= {ProConst.STATUS_ONLINE} AND "problem"."status" != {ProConst.STATUS_CONTEST}' + async with self.db.acquire() as con: result = await con.fetch( - ''' + f''' SELECT "challenge"."pro_id", MAX("challenge_state"."rate") AS "score", - COUNT("challenge_state") AS "count" + COUNT("challenge_state") AS "count", MIN("challenge_state"."state") as "state" FROM "challenge" INNER JOIN "challenge_state" ON "challenge"."chal_id" = "challenge_state"."chal_id" AND "challenge"."acct_id" = $1 INNER JOIN "problem" - ON "challenge"."pro_id" = "problem"."pro_id" + ON "challenge"."pro_id" = "problem"."pro_id" {problem_status_sql} WHERE "challenge"."contest_id" = $2 AND "challenge"."timestamp" >= $3 AND "challenge"."timestamp" <= $4 GROUP BY "challenge"."pro_id"; ''', @@ -171,10 +172,11 @@ async def map_rate_acct( ) statemap = {} - for pro_id, rate, count in result: + for pro_id, rate, count, state in result: statemap[pro_id] = { 'rate': rate, 'count': count, + 'state': state, } return None, statemap diff --git a/src/services/user.py b/src/services/user.py index e00a7c65..9858e18d 100644 --- a/src/services/user.py +++ b/src/services/user.py @@ -18,6 +18,8 @@ class UserConst: PW_MIN = 1 NAME_MAX = 27 # 3227 NAME_MIN = 1 + MOTTO_MIN = 0 + MOTTO_MAX = 100 ACCTTYPE_KERNEL = 0 ACCTTYPE_USER = 3 @@ -34,8 +36,9 @@ class Account: name: str photo: str cover: str + motto: str lastip: str - # TODO: Finish allow view other page + proclass_collection: list[int] def is_kernel(self): return self.acct_type == UserConst.ACCTTYPE_KERNEL @@ -45,7 +48,7 @@ def is_guest(self): GUEST_ACCOUNT = Account( - acct_id=0, acct_type=UserConst.ACCTTYPE_GUEST, name='', mail='', photo='', cover='', lastip='' + acct_id=0, acct_type=UserConst.ACCTTYPE_GUEST, name='', mail='', photo='', cover='', lastip='', motto='', proclass_collection=[] ) @@ -126,9 +129,8 @@ async def sign_up(self, mail, pw, name): except (asyncpg.IntegrityConstraintViolationError, asyncpg.UniqueViolationError): async with self.db.acquire() as con: - # FIXME: if exist, decrease account_acct_id_seq - result = await con.fetch("SELECT currval('account_acct_id_seq');") - cur_acct_id = int(result[0]['currval']) + result = await con.fetch("SELECT last_value FROM account_acct_id_seq;") + cur_acct_id = int(result[0]['last_value']) await con.execute(f"SELECT setval('account_acct_id_seq', {cur_acct_id - 1}, true);") return 'Eexist', None @@ -152,7 +154,7 @@ async def info_sign(self, req): acct_id = int(acct_id) - if (acct := (await self.rs.exists(f'account@{acct_id}'))) is None: + if (await self.rs.exists(f'account@{acct_id}')) is None: async with self.db.acquire() as con: result = await con.fetch('SELECT "acct_id","lastip" FROM "account" WHERE "acct_id" = $1;', acct_id) @@ -203,7 +205,7 @@ async def info_acct(self, acct_id) -> Tuple[None, Account] | Tuple[Literal['Enoe async with self.db.acquire() as con: result = await con.fetch( ''' - SELECT "name", "acct_type", "mail", "photo", "cover", "lastip" + SELECT "name", "acct_type", "mail", 
"photo", "cover", "lastip", "motto", "proclass_collection" FROM "account" WHERE "acct_id" = $1; ''', acct_id, @@ -220,7 +222,9 @@ async def info_acct(self, acct_id) -> Tuple[None, Account] | Tuple[Literal['Enoe name=result['name'], photo=result['photo'], cover=result['cover'], + motto=result['motto'], lastip=result['lastip'], + proclass_collection=result['proclass_collection'], ) b_acct = pickle.dumps(acct) @@ -229,7 +233,7 @@ async def info_acct(self, acct_id) -> Tuple[None, Account] | Tuple[Literal['Enoe return None, acct - async def update_acct(self, acct_id, acct_type, name, photo, cover): + async def update_acct(self, acct_id, acct_type, name, photo, cover, motto, proclass_collection): if acct_type not in [UserConst.ACCTTYPE_KERNEL, UserConst.ACCTTYPE_USER]: return 'Eparam1', None name_len = len(name) @@ -237,18 +241,26 @@ async def update_acct(self, acct_id, acct_type, name, photo, cover): return 'Enamemin', None if name_len > UserConst.NAME_MAX: return 'Enamemax', None + motto_len = len(motto) + if motto_len < UserConst.MOTTO_MIN: + return 'Emottomin', None + if motto_len > UserConst.MOTTO_MAX: + return 'Emottomax', None + acct_id = int(acct_id) async with self.db.acquire() as con: result = await con.fetch( ''' UPDATE "account" - SET "acct_type" = $1, "name" = $2, "photo" = $3, "cover" = $4 WHERE "acct_id" = $5 RETURNING "acct_id"; + SET "acct_type" = $1, "name" = $2, "photo" = $3, "cover" = $4, "motto" = $5, "proclass_collection" = $6 WHERE "acct_id" = $7 RETURNING "acct_id"; ''', acct_type, name, photo, cover, + motto, + proclass_collection, acct_id, ) if len(result) != 1: @@ -317,7 +329,9 @@ async def list_acct( name=name, photo='', cover='', + motto='', lastip=lastip, + proclass_collection=[], ) if private: diff --git a/src/static/index.js b/src/static/index.js index 81322123..102c2207 100644 --- a/src/static/index.js +++ b/src/static/index.js @@ -126,6 +126,10 @@ var index = new function() { $(document).on('click', 'a', function(e) { let cur_href = location.href; let href = $(this).attr('href'); + if (href == undefined || href.length == 0) return; + if ($(this).attr('target') !== "") { + return; + } window.history.pushState(null, document.title, $(this).attr('href')); if (href.startsWith('?')) { @@ -175,7 +179,7 @@ var index = new function() { acct_id = $('#indexjs').attr('acct_id'); contest_id = $('#indexjs').attr('contest_id'); - if (acct_id != '') { + if (acct_id != '0') { that.acct_id = parseInt(acct_id); j_navlist.find('li.leave').show(); } else { @@ -259,7 +263,7 @@ var index = new function() { return; } - progressbar.querySelector('.modal-header').textContent = title; + progressbar.querySelector('.text-center').textContent = title; } that.remove_progress_bar = function () { diff --git a/src/static/pack.js b/src/static/pack.js index c9812d27..543d4592 100644 --- a/src/static/pack.js +++ b/src/static/pack.js @@ -32,10 +32,17 @@ var pack = new function() { var lt = 0; ws.onopen = function(e) { - ws.send(JSON.stringify({ - 'pack_token' : pack_token, - 'pack_size' : file.size - })); + file.arrayBuffer() + .then(file_buffer => { + const word_array = CryptoJS.lib.WordArray.create(new Uint8Array(file_buffer)); + const hash_hex = CryptoJS.SHA1(word_array).toString(CryptoJS.enc.Hex); + + ws.send(JSON.stringify({ + 'pack_token' : pack_token, + 'pack_size' : file.size, + 'sha-1': hash_hex, + })); + }); }; ws.onmessage = function(e) { var size; @@ -52,7 +59,7 @@ var pack = new function() { remain -= size; ct = new Date().getTime(); - if(ct - lt > 500) { + if (ct - lt > 500) { 
defer.notify(off / file.size); lt = ct; } diff --git a/src/static/templ/acct-config.html b/src/static/templ/acct/acct-config.html similarity index 88% rename from src/static/templ/acct-config.html rename to src/static/templ/acct/acct-config.html index 8d1283cb..c0e6ce08 100644 --- a/src/static/templ/acct-config.html +++ b/src/static/templ/acct/acct-config.html @@ -1,6 +1,6 @@ {{ set_page_title("Edit Account") }} -{% if acct.acct_id != acct_id and not isadmin %} +{% if acct.acct_id != user.acct_id and not user.is_kernel() %} You don't have permission. {% else %} + + + +
diff --git a/src/static/templ/acct/proclass-list.html b/src/static/templ/acct/proclass-list.html new file mode 100644 index 00000000..99ca91e3 --- /dev/null +++ b/src/static/templ/acct/proclass-list.html @@ -0,0 +1,33 @@ +
+ + + + + + + + + + + + {% for proclass in proclass_list %} + + + + {% set t = "" %} + {% if proclass['type'] == 2 %} + {% set t = "Public" %} + {% elif proclass['type'] == 3 %} + {% set t = "Hidden" %} + {% end %} + + + + {% end %} + +
#NameType + +
{{ proclass['proclass_id'] }}{{ proclass['name'] }}{{ t }} + +
+
diff --git a/src/static/templ/acct/proclass-update.html b/src/static/templ/acct/proclass-update.html new file mode 100644 index 00000000..9853aa5d --- /dev/null +++ b/src/static/templ/acct/proclass-update.html @@ -0,0 +1,119 @@ + + + + +
+    {% set list = str(proclass['list']) %}
diff --git a/src/static/templ/acct.html b/src/static/templ/acct/profile.html similarity index 96% rename from src/static/templ/acct.html rename to src/static/templ/acct/profile.html index 46fe5e0d..d09de566 100644 --- a/src/static/templ/acct.html +++ b/src/static/templ/acct/profile.html @@ -1,6 +1,3 @@ - - - +{% if cur_proclass %} + +{% end %} + + +
- - + + {% if cur_proclass %} + + + {% else %} + + + {% end %} +
+ + + Manage
@@ -149,7 +294,7 @@ - {% if isadmin %} + {% if user.is_kernel() %} {% end %} @@ -222,8 +367,8 @@ {% set postfix = '' %} - {% if cur_pubclass != None %} - {% set postfix = postfix + '&pubclass_id=%s' % cur_pubclass['pubclass_id'] %} + {% if cur_proclass != None %} + {% set postfix = postfix + '&proclass_id=%s' % cur_proclass['proclass_id'] %} {% end %} {% if flt['problem_show'] != 'all' %} {% set postfix = postfix + f"&show={flt['problem_show']}" %} @@ -231,7 +376,7 @@ {% if flt['order'] != None %} {% set postfix = postfix + f"&order={flt['order']}" %} {% end %} - {% if isadmin and flt['online'] != False %} + {% if user.is_kernel() and flt['online'] %} {% set postfix = postfix + f"&online={flt['online']}" %} {% end %} {% if flt['reverse'] != False %} diff --git a/src/static/templ/question.html b/src/static/templ/question.html index 12b09bc3..28ece7bc 100644 --- a/src/static/templ/question.html +++ b/src/static/templ/question.html @@ -1,7 +1,7 @@
- Account : {{ acct.name }} {{ acct.acct_id }} + Account : {{ user.name }} {{ user.acct_id }}
{% for count, ques in enumerate(ques_list) %} diff --git a/src/static/templ/report-problem.html b/src/static/templ/report-problem.html index 16a8da13..28408091 100644 --- a/src/static/templ/report-problem.html +++ b/src/static/templ/report-problem.html @@ -7,7 +7,7 @@ j_submit.find('button.submit').on('click', function(e) { let info = j_submit.find('textarea.info').val(); let chal_id = "{{ chal_id }}"; - let acct_id = "{{ acct.acct_id }}"; + let acct_id = "{{ user.acct_id }}"; $.post('/oj/be/question', { 'reqtype': 'ask', diff --git a/src/static/templ/sign.html b/src/static/templ/sign.html index d9a00b57..2832045f 100644 --- a/src/static/templ/sign.html +++ b/src/static/templ/sign.html @@ -27,9 +27,9 @@ }); $('#signin').find('button.submit').on('click', function(e) { - var j_signin = $('#signin'); - var mail = j_signin.find('input.mail').val(); - var pw = j_signin.find('input.pw').val(); + let j_signin = $('#signin'); + let mail = j_signin.find('input.mail').val(); + let pw = j_signin.find('input.pw').val(); $.post('/oj/be/sign', { 'reqtype': 'signin', @@ -50,12 +50,30 @@ }); }); + $("#togglePasswordVisible").on('click', function(e) { + e.preventDefault(); + let password = $("#signin").find('input.pw'); + let eye_open = document.getElementById("eyeOpen"); + let eye_close = document.getElementById("eyeClose"); + + if (password.attr('type') === 'password') { + password.attr('type', 'text'); + eye_open.style.display = 'none'; + eye_close.style.display = 'block'; + + } else { + password.attr('type', 'password'); + eye_open.style.display = 'block'; + eye_close.style.display = 'none'; + } + }); + $('#signup').find('button.submit').on('click', function(e) { - var j_signup = $('#signup'); - var name = j_signup.find('input.name').val(); - var mail = j_signup.find('input.mail').val(); - var pw = j_signup.find('input.pw').val(); - var repeat = j_signup.find('input.repeat').val(); + let j_signup = $('#signup'); + let name = j_signup.find('input.name').val(); + let mail = j_signup.find('input.mail').val(); + let pw = j_signup.find('input.pw').val(); + let repeat = j_signup.find('input.repeat').val(); if(pw != repeat) { $('#signup div.print').print('Repeat incorrect'); @@ -68,10 +86,9 @@ 'pw': pw, 'name': name, }, function(res) { - var msg = 'Unknown'; - - if(res[0] == 'E') { + let msg = 'Unknown'; + if (res[0] == 'E') { if (res == 'Eexist') { msg = 'Account existed'; } else if (res == 'Emailmin') { @@ -115,8 +132,15 @@
diff --git a/src/static/templ/submit.html b/src/static/templ/submit.html index 070535f1..80a22d8c 100644 --- a/src/static/templ/submit.html +++ b/src/static/templ/submit.html @@ -1,5 +1,5 @@ {% from services.chal import ChalConst %} -submit +