From 45f5709214264b5b52383b97b2e95a0bf9c7570e Mon Sep 17 00:00:00 2001
From: Damien Elmes
Date: Wed, 14 Jun 2023 22:47:37 +1000
Subject: [PATCH] Migrate to protobuf-es (#2547)

* Fix .no-reduce-motion missing from graphs spinner, and not being honored

* Begin migration from protobuf.js -> protobuf-es

Motivation:

- Protobuf-es has a nicer API: messages are represented as classes, and
  fields which should exist are not marked as nullable.
- As it uses modules, only the proto messages we actually use get included
  in our bundle output. Protobuf.js put everything in a namespace, which
  prevented tree-shaking, and made it awkward to access inner messages.
- ./run after touching a proto file drops from about 8s to 6s on my
  machine. The tradeoff is slower decoding/encoding (#2043), but that was
  mainly a concern for the graphs page, and was unblocked by
  https://github.com/ankitects/anki/commit/37151213cd9d431f449ba4b3bc4c0329a1d9af78

Approach/notes:

- We generate the new protobuf-es interface in addition to the existing
  protobuf.js interface, so we can migrate one module at a time, starting
  with the graphs module.
- rslib:proto now generates RPC methods for TS in addition to the Python
  interface. The input-arg-unrolling behaviour of the Python generation is
  not required here, as we declare the input arg as a PlainMessage, which
  requires all fields to be provided.
- i64 is represented as bigint in protobuf-es. We were using a patch to
  protobuf.js to get it to output JavaScript numbers instead of long.js
  types, but now that our supported browser versions support bigint, it's
  probably worth biting the bullet and migrating to bigint. Our IDs fit
  comfortably within MAX_SAFE_INTEGER, but that may not hold for future
  fields we add.
- Oneofs are handled differently in protobuf-es, and are going to need
  some refactoring.

Other notable changes:

- Added a --mkdir arg to our build runner, so we can create a dir easily
  during the build on Windows.
- Simplified the preference handling code by wrapping the preferences in
  an outer store, instead of a separate store for each individual
  preference. This means a change to one preference will trigger a redraw
  of all components that depend on the preference store, but the redrawing
  is cheap after moving the data processing to Rust, and it makes the code
  easier to follow.
- Dropped async(Reactive).ts in favour of more explicit handling with
  await blocks/updating.
- Renamed add_inputs_to_group() -> add_dependency(), and fixed it not
  adding dependencies to parent groups. Renamed add() -> add_action() for
  clarity.

* Remove a couple of unused proto imports

* Migrate card info

* Migrate congrats, image occlusion, and tag editor

+ Fix imports for multi-word proto files.

* Migrate change-notetype

* Migrate deck options

* Bump target to es2020; simplify ts lib list

Used caniuse.com to confirm that Chromium 77, iOS 14.5 and Chrome on
Android support the full es2017-es2020 feature set.

* Migrate import-csv

* Migrate i18n and fix missing output types in .js

* Migrate custom scheduling, and remove protobuf.js

To mostly maintain our old API contract, we make use of protobuf-es's
ability to convert to JSON, which follows the same format as protobuf.js
did. It doesn't cover all cases: users who were previously changing the
variant of a type will need to update their code, as assigning to a new
variant no longer automatically removes the old one, which will cause an
error when we try to convert back from JSON.
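For example, a hypothetical sketch of the breakage in custom scheduling
code (the states object and its field names are illustrative, assumed
from the scheduler protos, and not taken verbatim from this patch):

    // states.good is now a plain JSON object produced via
    // SchedulingStates.toJson(). Under protobuf.js, assigning one oneof
    // variant implicitly cleared the others; in the JSON representation,
    // the stale key remains set:
    states.good.filtered = { rescheduling: { originalState: states.good.normal } };
    // Unless the old variant is removed, converting back with fromJson()
    // will fail, as two variants of the oneof are present:
    delete states.good.normal;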
But I suspect the large majority of users are adjusting the current
variant rather than creating a new one, and this saves us having to write
proxy wrappers, so it seems like a reasonable compromise.

One other change I made at the same time was to rename value->kind for
the oneofs in our scheduler protos, as 'value' was easily confused with
the 'case'/'value' output that protobuf-es has.

With protobuf.js codegen removed, touching a proto file and invoking
./run drops from about 8s to 6s. This closes #2043.

* Allow tree-shaking on protobuf types

* Display backend error messages in our ts alert()

* Make sourcemap generation opt-in for ts-run

It considerably slows down the build, and is not used most of the time.
---
 Cargo.lock | 3 +
 build/configure/src/aqt.rs | 40 +-
 build/configure/src/bundle.rs | 26 +-
 build/configure/src/proto.rs | 4 +-
 build/configure/src/pylib.rs | 16 +-
 build/configure/src/python.rs | 18 +-
 build/configure/src/rust.rs | 36 +-
 build/configure/src/web.rs | 87 +--
 build/ninja_gen/src/archives.rs | 6 +-
 build/ninja_gen/src/build.rs | 64 ++-
 build/ninja_gen/src/cargo.rs | 2 +-
 build/ninja_gen/src/configure.rs | 2 +-
 build/ninja_gen/src/node.rs | 110 +++-
 build/ninja_gen/src/python.rs | 4 +-
 build/ninja_gen/src/sass.rs | 2 +-
 build/runner/Cargo.toml | 3 +
 build/runner/src/main.rs | 10 +-
 build/runner/src/run.rs | 13 +-
 docs/protobuf.md | 6 +-
 package.json | 24 +-
 proto/anki/image_occlusion.proto | 2 -
 proto/anki/scheduler.proto | 18 +-
 pylib/anki/scheduler/v3.py | 7 +-
 pylib/tools/markpure.py | 28 +
 qt/aqt/mediasrv.py | 8 +-
 qt/aqt/reviewer.py | 14 +-
 rslib/proto/build.rs | 4 +
 rslib/proto/ts.rs | 204 +++++++
 rslib/proto/utils.rs | 45 ++
 rslib/src/backend/scheduler/mod.rs | 17 +
 .../src/backend/scheduler/states/filtered.rs | 14 +-
 rslib/src/backend/scheduler/states/mod.rs | 12 +-
 rslib/src/backend/scheduler/states/normal.rs | 22 +-
 sass/base.scss | 5 -
 tools/ts-run | 2 +-
 ts/bundle_svelte.mjs | 2 +-
 ts/card-info/CardInfo.svelte | 21 +-
 ts/card-info/CardStats.svelte | 12 +-
 ts/card-info/Revlog.svelte | 33 +-
 ts/card-info/index.ts | 2 +-
 ts/change-notetype/index.ts | 20 +-
 ts/change-notetype/lib.test.ts | 32 +-
 ts/change-notetype/lib.ts | 36 +-
 ts/components/Select.svelte | 6 +-
 ts/components/SelectOption.svelte | 6 +-
 ts/congrats/CongratsPage.svelte | 4 +-
 ts/congrats/index.ts | 6 +-
 ts/congrats/lib.ts | 4 +-
 ts/deck-options/DailyLimits.svelte | 8 +-
 ts/deck-options/DisplayOrder.svelte | 29 +-
 ts/deck-options/NewOptions.svelte | 5 +-
 ts/deck-options/index.ts | 10 +-
 ts/deck-options/lib.test.ts | 35 +-
 ts/deck-options/lib.ts | 59 +-
 ts/graphs/AddedGraph.svelte | 11 +-
 ts/graphs/ButtonsGraph.svelte | 4 +-
 ts/graphs/CalendarGraph.svelte | 35 +-
 ts/graphs/CardCounts.svelte | 15 +-
 ts/graphs/EaseGraph.svelte | 11 +-
 ts/graphs/FutureDue.svelte | 15 +-
 ts/graphs/GraphsPage.svelte | 14 +-
 ts/graphs/HourGraph.svelte | 4 +-
 ts/graphs/IntervalsGraph.svelte | 11 +-
 ts/graphs/RangeBox.svelte | 2 +-
 ts/graphs/ReviewsGraph.svelte | 4 +-
 ts/graphs/TodayStats.svelte | 4 +-
 ts/graphs/WithGraphData.svelte | 80 ++-
 ts/graphs/added.ts | 4 +-
 ts/graphs/buttons.ts | 6 +-
 ts/graphs/calendar.ts | 12 +-
 ts/graphs/card-counts.ts | 6 +-
 ts/graphs/ease.ts | 4 +-
 ts/graphs/future-due.ts | 4 +-
 ts/graphs/graph-helpers.ts | 12 +-
 ts/graphs/hours.ts | 9 +-
 ts/graphs/intervals.ts | 4 +-
 ts/graphs/reviews.ts | 4 +-
 ts/graphs/today.ts | 4 +-
 ts/image-occlusion/add-or-update-note.ts | 24 +-
 ts/image-occlusion/lib.ts | 61 --
 ts/image-occlusion/mask-editor.ts | 20 +-
 ts/import-csv/DeckDupeCheckSwitch.svelte | 8 +-
 ts/import-csv/DeckSelector.svelte | 6 +-
 ts/import-csv/DelimiterSelector.svelte | 5 +-
 ts/import-csv/DupeResolutionSelector.svelte | 10 +-
 ts/import-csv/FieldMapper.svelte | 10 +-
 ts/import-csv/ImportCsvPage.svelte | 103 ++--
 ts/import-csv/NotetypeSelector.svelte | 6 +-
 ts/import-csv/Preview.svelte | 4 +-
 ts/import-csv/index.ts | 47 +-
 ts/import-csv/lib.ts | 58 +-
 ts/lib/i18n/utils.ts | 4 +-
 ts/lib/post.ts | 48 ++
 ts/lib/postrequest.ts | 26 -
 ts/lib/proto.ts | 94 ----
 ts/licenses.json | 124 +----
 ts/patches/protobufjs+7.2.1.patch | 29 -
 ts/reviewer/answering.ts | 62 ++-
 ts/reviewer/lib.test.ts | 146 +++++
 ts/reviewer/tsconfig.json | 3 +-
 ts/sveltelib/async.ts | 31 --
 ts/sveltelib/asyncReactive.ts | 53 --
 ts/sveltelib/preferences.ts | 90 +--
 ts/tag-editor/TagEditor.svelte | 7 +-
 ts/tsconfig.json | 13 +-
 yarn.lock | 521 ++----------------
 106 files changed, 1404 insertions(+), 1696 deletions(-)
 create mode 100644 pylib/tools/markpure.py
 create mode 100644 rslib/proto/ts.rs
 create mode 100644 rslib/proto/utils.rs
 create mode 100644 ts/lib/post.ts
 delete mode 100644 ts/lib/postrequest.ts
 delete mode 100644 ts/lib/proto.ts
 delete mode 100644 ts/patches/protobufjs+7.2.1.patch
 create mode 100644 ts/reviewer/lib.test.ts
 delete mode 100644 ts/sveltelib/async.ts
 delete mode 100644 ts/sveltelib/asyncReactive.ts
diff --git a/Cargo.lock b/Cargo.lock
index 518421334..80623840a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3390,8 +3390,11 @@ dependencies = [
 name = "runner"
 version = "0.0.0"
 dependencies = [
+ "anki_io",
+ "anyhow",
  "camino",
  "clap 4.2.1",
+ "itertools",
  "junction",
  "termcolor",
  "workspace-hack",
diff --git a/build/configure/src/aqt.rs b/build/configure/src/aqt.rs
index d2e5b160b..8d50c8b25 100644
--- a/build/configure/src/aqt.rs
+++ b/build/configure/src/aqt.rs
@@ -42,7 +42,7 @@ fn build_forms(build: &mut Build) -> Result<()> {
         py_files.push(outpath.replace(".ui", "_qt5.py"));
         py_files.push(outpath.replace(".ui", "_qt6.py"));
     }
-    build.add(
+    build.add_action(
         "qt/aqt:forms",
         RunCommand {
             command: ":pyenv:bin",
@@ -65,7 +65,7 @@ fn build_forms(build: &mut Build) -> Result<()> {
 /// files into a separate folder, the generated files are exported as a separate
 /// _aqt module.
fn build_generated_sources(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "qt/aqt:hooks.py", RunCommand { command: ":pyenv:bin", @@ -79,7 +79,7 @@ fn build_generated_sources(build: &mut Build) -> Result<()> { }, }, )?; - build.add( + build.add_action( "qt/aqt:sass_vars", RunCommand { command: ":pyenv:bin", @@ -98,7 +98,7 @@ fn build_generated_sources(build: &mut Build) -> Result<()> { )?; // we need to add a py.typed file to the generated sources, or mypy // will ignore them when used with the generated wheel - build.add( + build.add_action( "qt/aqt:py.typed", CopyFile { input: "qt/aqt/py.typed".into(), @@ -125,7 +125,7 @@ fn build_css(build: &mut Build) -> Result<()> { let mut out_path = out_dir.join(stem); out_path.set_extension("css"); - build.add( + build.add_action( "qt/aqt:data/web/css", CompileSass { input: scss.into(), @@ -143,7 +143,7 @@ fn build_css(build: &mut Build) -> Result<()> { ], ".css", ); - build.add( + build.add_action( "qt/aqt:data/web/css", CopyFiles { inputs: other_ts_css.into(), @@ -153,7 +153,7 @@ fn build_css(build: &mut Build) -> Result<()> { } fn build_imgs(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "qt/aqt:data/web/imgs", CopyFiles { inputs: inputs![glob!["qt/aqt/data/web/imgs/*"]], @@ -164,7 +164,7 @@ fn build_imgs(build: &mut Build) -> Result<()> { fn build_js(build: &mut Build) -> Result<()> { for ts_file in &["deckbrowser", "webview", "toolbar", "reviewer-bottom"] { - build.add( + build.add_action( "qt/aqt:data/web/js", EsbuildScript { script: "ts/transform_ts.mjs".into(), @@ -177,7 +177,7 @@ fn build_js(build: &mut Build) -> Result<()> { } let files = inputs![glob!["qt/aqt/data/web/js/*"]]; eslint(build, "aqt", "qt/aqt/data/web/js", files.clone())?; - build.add( + build.add_action( "check:typescript:aqt", TypescriptCheck { tsconfig: "qt/aqt/data/web/js/tsconfig.json".into(), @@ -188,7 +188,7 @@ fn build_js(build: &mut Build) -> Result<()> { inputs![":ts:editor", ":ts:reviewer:reviewer.js", ":ts:mathjax"], ".js", ); - build.add( + build.add_action( "qt/aqt:data/web/js", CopyFiles { inputs: files_from_ts.into(), @@ -199,8 +199,8 @@ fn build_js(build: &mut Build) -> Result<()> { } fn build_vendor_js(build: &mut Build) -> Result<()> { - build.add("qt/aqt:data/web/js/vendor:mathjax", copy_mathjax())?; - build.add( + build.add_action("qt/aqt:data/web/js/vendor:mathjax", copy_mathjax())?; + build.add_action( "qt/aqt:data/web/js/vendor", CopyFiles { inputs: inputs![ @@ -216,7 +216,7 @@ fn build_vendor_js(build: &mut Build) -> Result<()> { } fn build_pages(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "qt/aqt:data/web/pages", CopyFiles { inputs: inputs![":ts:pages"], @@ -228,21 +228,21 @@ fn build_pages(build: &mut Build) -> Result<()> { fn build_icons(build: &mut Build) -> Result<()> { build_themed_icons(build)?; - build.add( + build.add_action( "qt/aqt:data/qt/icons:mdi_unthemed", CopyFiles { inputs: inputs![":node_modules:mdi_unthemed"], output_folder: "qt/_aqt/data/qt/icons", }, )?; - build.add( + build.add_action( "qt/aqt:data/qt/icons:from_src", CopyFiles { inputs: inputs![glob!["qt/aqt/data/qt/icons/*.{png,svg}"]], output_folder: "qt/_aqt/data/qt/icons", }, )?; - build.add( + build.add_action( "qt/aqt:data/qt/icons", RunCommand { command: ":pyenv:bin", @@ -280,7 +280,7 @@ fn build_themed_icons(build: &mut Build) -> Result<()> { if let Some(&extra) = themed_icons_with_extra.get(stem) { colors.extend(extra); } - build.add( + build.add_action( "qt/aqt:data/qt/icons:mdi_themed", BuildThemedIcon 
{ src_icon: path, @@ -332,7 +332,7 @@ impl BuildAction for BuildThemedIcon<'_> { fn build_macos_helper(build: &mut Build) -> Result<()> { if cfg!(target_os = "macos") { - build.add( + build.add_action( "qt/aqt:data/lib:libankihelper", RunCommand { command: ":pyenv:bin", @@ -351,7 +351,7 @@ fn build_macos_helper(build: &mut Build) -> Result<()> { } fn build_wheel(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "wheels:aqt", BuildWheel { name: "aqt", @@ -371,7 +371,7 @@ fn check_python(build: &mut Build) -> Result<()> { inputs![glob!("qt/**/*.py", "qt/bundle/PyOxidizer/**")], )?; - build.add( + build.add_action( "check:pytest:aqt", PythonTest { folder: "qt/tests", diff --git a/build/configure/src/bundle.rs b/build/configure/src/bundle.rs index a2caad4dd..090446fa6 100644 --- a/build/configure/src/bundle.rs +++ b/build/configure/src/bundle.rs @@ -145,7 +145,7 @@ fn download_dist_folder_deps(build: &mut Build) -> Result<()> { )?; bundle_deps.extend([":extract:linux_qt_plugins"]); } - build.add_inputs_to_group( + build.add_dependency( "bundle:deps", inputs![bundle_deps .iter() @@ -189,7 +189,7 @@ fn setup_primary_venv(build: &mut Build) -> Result<()> { if cfg!(windows) { qt6_reqs = inputs![qt6_reqs, "python/requirements.win.txt"]; } - build.add( + build.add_action( PRIMARY_VENV.label, PythonEnvironment { folder: PRIMARY_VENV.path_without_builddir, @@ -210,7 +210,7 @@ fn setup_qt5_venv(build: &mut Build) -> Result<()> { "python/requirements.qt5_15.txt" } ]; - build.add( + build.add_action( QT5_VENV.label, PythonEnvironment { folder: QT5_VENV.path_without_builddir, @@ -238,7 +238,7 @@ impl BuildAction for InstallAnkiWheels { } fn install_anki_wheels(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "bundle:add_wheels:qt6", InstallAnkiWheels { venv: PRIMARY_VENV }, )?; @@ -246,13 +246,13 @@ fn install_anki_wheels(build: &mut Build) -> Result<()> { } fn build_pyoxidizer(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "bundle:pyoxidizer:repo", SyncSubmodule { path: "qt/bundle/PyOxidizer", }, )?; - build.add( + build.add_action( "bundle:pyoxidizer:bin", CargoBuild { inputs: inputs![":bundle:pyoxidizer:repo", glob!["qt/bundle/PyOxidizer/**"]], @@ -297,7 +297,7 @@ impl BuildAction for BuildArtifacts { } fn build_artifacts(build: &mut Build) -> Result<()> { - build.add("bundle:artifacts", BuildArtifacts {}) + build.add_action("bundle:artifacts", BuildArtifacts {}) } struct BuildBundle {} @@ -321,7 +321,7 @@ impl BuildAction for BuildBundle { } fn build_binary(build: &mut Build) -> Result<()> { - build.add("bundle:binary", BuildBundle {}) + build.add_action("bundle:binary", BuildBundle {}) } struct BuildDistFolder { @@ -359,7 +359,7 @@ fn build_dist_folder(build: &mut Build, kind: DistKind) -> Result<()> { DistKind::Standard => "bundle:folder:std", DistKind::Alternate => "bundle:folder:alt", }; - build.add(group, BuildDistFolder { kind, deps }) + build.add_action(group, BuildDistFolder { kind, deps }) } fn build_packages(build: &mut Build) -> Result<()> { @@ -409,7 +409,7 @@ impl BuildAction for BuildTarball { fn build_tarball(build: &mut Build, kind: DistKind) -> Result<()> { let name = kind.folder_name(); - build.add(format!("bundle:package:{name}"), BuildTarball { kind }) + build.add_action(format!("bundle:package:{name}"), BuildTarball { kind }) } struct BuildWindowsInstallers {} @@ -434,7 +434,7 @@ impl BuildAction for BuildWindowsInstallers { } fn build_windows_installers(build: &mut Build) -> Result<()> { - build.add("bundle:package", 
BuildWindowsInstallers {}) + build.add_action("bundle:package", BuildWindowsInstallers {}) } struct BuildMacApp { @@ -456,7 +456,7 @@ impl BuildAction for BuildMacApp { } fn build_mac_app(build: &mut Build, kind: DistKind) -> Result<()> { - build.add(format!("bundle:app:{}", kind.name()), BuildMacApp { kind }) + build.add_action(format!("bundle:app:{}", kind.name()), BuildMacApp { kind }) } struct BuildDmgs {} @@ -488,5 +488,5 @@ impl BuildAction for BuildDmgs { } fn build_dmgs(build: &mut Build) -> Result<()> { - build.add("bundle:dmg", BuildDmgs {}) + build.add_action("bundle:dmg", BuildDmgs {}) } diff --git a/build/configure/src/proto.rs b/build/configure/src/proto.rs index 263de30b1..0fcbaadeb 100644 --- a/build/configure/src/proto.rs +++ b/build/configure/src/proto.rs @@ -41,14 +41,14 @@ pub fn setup_protoc(build: &mut Build) -> Result<()> { } pub fn check_proto(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "check:format:proto", ClangFormat { inputs: inputs![glob!["proto/**/*.proto"]], check_only: true, }, )?; - build.add( + build.add_action( "format:proto", ClangFormat { inputs: inputs![glob!["proto/**/*.proto"]], diff --git a/build/configure/src/pylib.rs b/build/configure/src/pylib.rs index 8799349ce..070c100aa 100644 --- a/build/configure/src/pylib.rs +++ b/build/configure/src/pylib.rs @@ -20,21 +20,21 @@ use crate::python::GenPythonProto; pub fn build_pylib(build: &mut Build) -> Result<()> { // generated files - build.add( + build.add_action( "pylib/anki:proto", GenPythonProto { proto_files: inputs![glob!["proto/anki/*.proto"]], }, )?; - build.add( + build.add_action( "pylib/anki:_fluent.py", RunCommand { command: ":pyenv:bin", args: "$script $strings $out", inputs: hashmap! { "script" => inputs!["pylib/tools/genfluent.py"], - "strings" => inputs![":rslib/i18n:strings.json"], + "strings" => inputs![":rslib:i18n:strings.json"], "" => inputs!["pylib/anki/_vendor/stringcase.py"] }, outputs: hashmap! 
{ @@ -42,7 +42,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> { }, }, )?; - build.add( + build.add_action( "pylib/anki:hooks_gen.py", RunCommand { command: ":pyenv:bin", @@ -56,7 +56,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> { }, }, )?; - build.add( + build.add_action( "pylib/anki:_rsbridge", LinkFile { input: inputs![":pylib/rsbridge"], @@ -69,10 +69,10 @@ pub fn build_pylib(build: &mut Build) -> Result<()> { ), }, )?; - build.add("pylib/anki:buildinfo.py", GenBuildInfo {})?; + build.add_action("pylib/anki:buildinfo.py", GenBuildInfo {})?; // wheel - build.add( + build.add_action( "wheels:anki", BuildWheel { name: "anki", @@ -93,7 +93,7 @@ pub fn build_pylib(build: &mut Build) -> Result<()> { pub fn check_pylib(build: &mut Build) -> Result<()> { python_format(build, "pylib", inputs![glob!("pylib/**/*.py")])?; - build.add( + build.add_action( "check:pytest:pylib", PythonTest { folder: "pylib/tests", diff --git a/build/configure/src/python.rs b/build/configure/src/python.rs index a986f8935..c5fb330b6 100644 --- a/build/configure/src/python.rs +++ b/build/configure/src/python.rs @@ -32,7 +32,7 @@ pub fn setup_venv(build: &mut Build) -> Result<()> { "python/requirements.qt6_5.txt", ] }; - build.add( + build.add_action( "pyenv", PythonEnvironment { folder: "pyenv", @@ -57,7 +57,7 @@ pub fn setup_venv(build: &mut Build) -> Result<()> { reqs_qt5 = inputs![reqs_qt5, "python/requirements.win.txt"]; } - build.add( + build.add_action( "pyenv-qt5.15", PythonEnvironment { folder: "pyenv-qt5.15", @@ -66,7 +66,7 @@ pub fn setup_venv(build: &mut Build) -> Result<()> { extra_binary_exports: &[], }, )?; - build.add( + build.add_action( "pyenv-qt5.14", PythonEnvironment { folder: "pyenv-qt5.14", @@ -110,7 +110,7 @@ impl BuildAction for GenPythonProto { build.add_outputs("", python_outputs); // not a direct dependency, but we include the output interface in our declared // outputs - build.add_inputs("", inputs!["rslib/proto"]); + build.add_inputs("", inputs![":rslib:proto"]); build.add_outputs("", vec!["pylib/anki/_backend_generated.py"]); } } @@ -159,7 +159,7 @@ pub fn check_python(build: &mut Build) -> Result<()> { python_format(build, "ftl", inputs![glob!("ftl/**/*.py")])?; python_format(build, "tools", inputs![glob!("tools/**/*.py")])?; - build.add( + build.add_action( "check:mypy", PythonTypecheck { folders: &[ @@ -190,7 +190,7 @@ fn add_pylint(build: &mut Build) -> Result<()> { // pylint does not support PEP420 implicit namespaces split across import paths, // so we need to merge our pylib sources and generated files before invoking it, // and add a top-level __init__.py - build.add( + build.add_action( "pylint/anki", RsyncFiles { inputs: inputs![":pylib/anki"], @@ -200,7 +200,7 @@ fn add_pylint(build: &mut Build) -> Result<()> { extra_args: "--links", }, )?; - build.add( + build.add_action( "pylint/anki", RsyncFiles { inputs: inputs![glob!["pylib/anki/**"]], @@ -209,7 +209,7 @@ fn add_pylint(build: &mut Build) -> Result<()> { extra_args: "", }, )?; - build.add( + build.add_action( "pylint/anki", RunCommand { command: ":pyenv:bin", @@ -218,7 +218,7 @@ fn add_pylint(build: &mut Build) -> Result<()> { outputs: hashmap! 
{ "out" => vec!["pylint/anki/__init__.py"] }, }, )?; - build.add( + build.add_action( "check:pylint", PythonLint { folders: &[ diff --git a/build/configure/src/rust.rs b/build/configure/src/rust.rs index 92467cdb8..e6c8d1efc 100644 --- a/build/configure/src/rust.rs +++ b/build/configure/src/rust.rs @@ -28,21 +28,21 @@ pub fn build_rust(build: &mut Build) -> Result<()> { fn prepare_translations(build: &mut Build) -> Result<()> { // ensure repos are checked out - build.add( + build.add_action( "ftl:repo:core", SyncSubmodule { path: "ftl/core-repo", }, )?; - build.add( + build.add_action( "ftl:repo:qt", SyncSubmodule { path: "ftl/qt-repo", }, )?; // build anki_i18n and spit out strings.json - build.add( - "rslib/i18n", + build.add_action( + "rslib:i18n", CargoBuild { inputs: inputs![ glob!["rslib/i18n/**"], @@ -59,7 +59,7 @@ fn prepare_translations(build: &mut Build) -> Result<()> { }, )?; - build.add( + build.add_action( "ftl:sync", CargoRun { binary_name: "ftl-sync", @@ -69,7 +69,7 @@ fn prepare_translations(build: &mut Build) -> Result<()> { }, )?; - build.add( + build.add_action( "ftl:deprecate", CargoRun { binary_name: "deprecate_ftl_entries", @@ -84,8 +84,8 @@ fn prepare_translations(build: &mut Build) -> Result<()> { fn prepare_proto_descriptors(build: &mut Build) -> Result<()> { // build anki_proto and spit out descriptors/Python interface - build.add( - "rslib/proto", + build.add_action( + "rslib:proto", CargoBuild { inputs: inputs![glob!["{proto,rslib/proto}/**"], "$protoc_binary",], outputs: &[RustOutput::Data( @@ -106,7 +106,7 @@ fn build_rsbridge(build: &mut Build) -> Result<()> { } else { "native-tls" }; - build.add( + build.add_action( "pylib/rsbridge", CargoBuild { inputs: inputs![ @@ -114,8 +114,8 @@ fn build_rsbridge(build: &mut Build) -> Result<()> { // declare a dependency on i18n/proto so it gets built first, allowing // things depending on strings.json to build faster, and ensuring // changes to the ftl files trigger a rebuild - ":rslib/i18n", - ":rslib/proto", + ":rslib:i18n", + ":rslib:proto", // when env vars change the build hash gets updated "$builddir/build.ninja", // building on Windows requires python3.lib @@ -140,7 +140,7 @@ pub fn check_rust(build: &mut Build) -> Result<()> { "Cargo.toml", "rust-toolchain.toml", ]; - build.add( + build.add_action( "check:format:rust", CargoFormat { inputs: inputs.clone(), @@ -148,7 +148,7 @@ pub fn check_rust(build: &mut Build) -> Result<()> { working_dir: Some("cargo/format"), }, )?; - build.add( + build.add_action( "format:rust", CargoFormat { inputs: inputs.clone(), @@ -163,13 +163,13 @@ pub fn check_rust(build: &mut Build) -> Result<()> { ":pylib/rsbridge" ]; - build.add( + build.add_action( "check:clippy", CargoClippy { inputs: inputs.clone(), }, )?; - build.add("check:rust_test", CargoTest { inputs })?; + build.add_action("check:rust_test", CargoTest { inputs })?; Ok(()) } @@ -193,7 +193,7 @@ pub fn check_minilints(build: &mut Build) -> Result<()> { } fn on_first_instance(&self, build: &mut Build) -> Result<()> { - build.add( + build.add_action( "build:minilints", CargoBuild { inputs: inputs![glob!("tools/minilints/**/*")], @@ -211,14 +211,14 @@ pub fn check_minilints(build: &mut Build) -> Result<()> { "{node_modules,qt/bundle/PyOxidizer}/**" ]]; - build.add( + build.add_action( "check:minilints", RunMinilints { deps: files.clone(), fix: false, }, )?; - build.add( + build.add_action( "fix:minilints", RunMinilints { deps: files, diff --git a/build/configure/src/web.rs b/build/configure/src/web.rs index 755dc405c..5179b6724 
100644 --- a/build/configure/src/web.rs +++ b/build/configure/src/web.rs @@ -1,7 +1,6 @@ // Copyright: Ankitects Pty Ltd and contributors // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html -// use super::*; use ninja_gen::action::BuildAction; use ninja_gen::command::RunCommand; use ninja_gen::glob; @@ -10,6 +9,7 @@ use ninja_gen::input::BuildInput; use ninja_gen::inputs; use ninja_gen::node::node_archive; use ninja_gen::node::CompileSass; +use ninja_gen::node::CompileTypescript; use ninja_gen::node::DPrint; use ninja_gen::node::EsbuildScript; use ninja_gen::node::Eslint; @@ -47,9 +47,8 @@ fn setup_node(build: &mut Build) -> Result<()> { "sass", "tsc", "tsx", - "pbjs", - "pbts", "jest", + "protoc-gen-es", ], hashmap! { "jquery" => vec![ @@ -116,14 +115,14 @@ fn setup_node(build: &mut Build) -> Result<()> { } fn build_and_check_tslib(build: &mut Build) -> Result<()> { - build.add( + build.add_action( "ts:lib:i18n", RunCommand { command: ":pyenv:bin", args: "$script $strings $out", inputs: hashmap! { "script" => inputs!["ts/lib/genfluent.py"], - "strings" => inputs![":rslib/i18n:strings.json"], + "strings" => inputs![":rslib:i18n:strings.json"], "" => inputs!["pylib/anki/_vendor/stringcase.py"] }, outputs: hashmap! { @@ -136,23 +135,38 @@ fn build_and_check_tslib(build: &mut Build) -> Result<()> { }, }, )?; - build.add( - "ts:lib:backend_proto.d.ts", + build.add_action( + "ts:lib:proto", GenTypescriptProto { - protos: inputs![glob!["proto/anki/*.proto"]], - output_stem: "ts/lib/backend_proto", + protos: inputs![glob!["proto/**/*.proto"]], + include_dirs: &["proto"], + out_dir: "out/ts/lib", + out_path_transform: |path| path.replace("proto/", "ts/lib/"), + py_transform_script: "pylib/tools/markpure.py", + }, + )?; + // ensure _service files are generated by rslib + build.add_dependency("ts:lib:proto", inputs![":rslib:proto"]); + // the generated _service.js files import @tslib/post, and esbuild won't be able + // to import the .ts file, so we need to generate a .js file for it + build.add_action( + "ts:lib:proto", + CompileTypescript { + ts_files: "ts/lib/post.ts".into(), + out_dir: "out/ts/lib", + out_path_transform: |path| path.into(), }, )?; let src_files = inputs![glob!["ts/lib/**"]]; eslint(build, "lib", "ts/lib", inputs![":ts:lib", &src_files])?; - build.add( + build.add_action( "check:jest:lib", jest_test("ts/lib", inputs![":ts:lib", &src_files], true), )?; - build.add_inputs_to_group("ts:lib", src_files); + build.add_dependency("ts:lib", src_files); Ok(()) } @@ -178,11 +192,11 @@ fn declare_and_check_other_libraries(build: &mut Build) -> Result<()> { ] { let library_with_ts = format!("ts:{library}"); let folder = library_with_ts.replace(':', "/"); - build.add_inputs_to_group(&library_with_ts, inputs.clone()); + build.add_dependency(&library_with_ts, inputs.clone()); eslint(build, library, &folder, inputs.clone())?; if matches!(library, "domlib" | "html-filter") { - build.add( + build.add_action( &format!("check:jest:{library}"), jest_test(&folder, inputs, true), )?; @@ -201,7 +215,7 @@ fn declare_and_check_other_libraries(build: &mut Build) -> Result<()> { pub fn eslint(build: &mut Build, name: &str, folder: &str, deps: BuildInput) -> Result<()> { let eslint_rc = inputs![".eslintrc.js"]; - build.add( + build.add_action( format!("check:eslint:{name}"), Eslint { folder, @@ -210,7 +224,7 @@ pub fn eslint(build: &mut Build, name: &str, folder: &str, deps: BuildInput) -> fix: false, }, )?; - build.add( + build.add_action( format!("fix:eslint:{name}"), Eslint { 
folder, @@ -223,13 +237,13 @@ pub fn eslint(build: &mut Build, name: &str, folder: &str, deps: BuildInput) -> } fn build_and_check_pages(build: &mut Build) -> Result<()> { - build.add_inputs_to_group("ts:tag-editor", inputs![glob!["ts/tag-editor/**"]]); + build.add_dependency("ts:tag-editor", inputs![glob!["ts/tag-editor/**"]]); let mut build_page = |name: &str, html: bool, deps: BuildInput| -> Result<()> { let group = format!("ts:pages:{name}"); let deps = inputs![deps, glob!(format!("ts/{name}/**"))]; let extra_exts = if html { &["css", "html"][..] } else { &["css"] }; - build.add( + build.add_action( &group, EsbuildScript { script: inputs!["ts/bundle_svelte.mjs"], @@ -239,7 +253,7 @@ fn build_and_check_pages(build: &mut Build) -> Result<()> { extra_exts, }, )?; - build.add( + build.add_action( format!("check:svelte:{name}"), SvelteCheck { tsconfig: inputs![format!("ts/{name}/tsconfig.json")], @@ -249,7 +263,7 @@ fn build_and_check_pages(build: &mut Build) -> Result<()> { let folder = format!("ts/{name}"); eslint(build, name, &folder, deps.clone())?; if matches!(name, "deck-options" | "change-notetype") { - build.add( + build.add_action( &format!("check:jest:{name}"), jest_test(&folder, deps, false), )?; @@ -365,7 +379,7 @@ fn build_and_check_editor(build: &mut Build) -> Result<()> { let mut build_editor_page = |name: &str, entrypoint: &str| -> Result<()> { let stem = format!("ts/editor/{name}"); - build.add( + build.add_action( "ts:editor", EsbuildScript { script: inputs!["ts/bundle_svelte.mjs"], @@ -382,7 +396,7 @@ fn build_and_check_editor(build: &mut Build) -> Result<()> { build_editor_page("note_creator", "index_creator")?; let group = "ts/editor"; - build.add( + build.add_action( "check:svelte:editor", SvelteCheck { tsconfig: inputs![format!("{group}/tsconfig.json")], @@ -395,7 +409,7 @@ fn build_and_check_editor(build: &mut Build) -> Result<()> { fn build_and_check_reviewer(build: &mut Build) -> Result<()> { let reviewer_deps = inputs![":ts:lib", glob!("ts/{reviewer,image-occlusion}/**"),]; - build.add( + build.add_action( "ts:reviewer:reviewer.js", EsbuildScript { script: inputs!["ts/bundle_ts.mjs"], @@ -405,7 +419,7 @@ fn build_and_check_reviewer(build: &mut Build) -> Result<()> { extra_exts: &[], }, )?; - build.add( + build.add_action( "ts:reviewer:reviewer.css", CompileSass { input: inputs!["ts/reviewer/reviewer.scss"], @@ -414,7 +428,7 @@ fn build_and_check_reviewer(build: &mut Build) -> Result<()> { load_paths: vec!["."], }, )?; - build.add( + build.add_action( "ts:reviewer:reviewer_extras_bundle.js", EsbuildScript { script: inputs!["ts/bundle_ts.mjs"], @@ -425,26 +439,31 @@ fn build_and_check_reviewer(build: &mut Build) -> Result<()> { }, )?; - build.add( + build.add_action( "check:typescript:reviewer", TypescriptCheck { tsconfig: inputs!["ts/reviewer/tsconfig.json"], inputs: reviewer_deps.clone(), }, )?; - eslint(build, "reviewer", "ts/reviewer", reviewer_deps) + eslint(build, "reviewer", "ts/reviewer", reviewer_deps)?; + build.add_action( + "check:jest:reviewer", + jest_test("ts/reviewer", inputs![":ts:reviewer"], false), + )?; + Ok(()) } fn check_web(build: &mut Build) -> Result<()> { let dprint_files = inputs![glob!["**/*.{ts,mjs,js,md,json,toml,svelte}", "target/**"]]; - build.add( + build.add_action( "check:format:dprint", DPrint { inputs: dprint_files.clone(), check_only: true, }, )?; - build.add( + build.add_action( "format:dprint", DPrint { inputs: dprint_files, @@ -456,14 +475,14 @@ fn check_web(build: &mut Build) -> Result<()> { } pub fn check_sql(build: &mut 
Build) -> Result<()> { - build.add( + build.add_action( "check:format:sql", SqlFormat { inputs: inputs![glob!["**/*.sql"]], check_only: true, }, )?; - build.add( + build.add_action( "format:sql", SqlFormat { inputs: inputs![glob!["**/*.sql"]], @@ -475,7 +494,7 @@ pub fn check_sql(build: &mut Build) -> Result<()> { fn build_and_check_mathjax(build: &mut Build) -> Result<()> { let files = inputs![glob!["ts/mathjax/*"]]; - build.add( + build.add_action( "ts:mathjax", EsbuildScript { script: "ts/transform_ts.mjs".into(), @@ -486,7 +505,7 @@ fn build_and_check_mathjax(build: &mut Build) -> Result<()> { }, )?; eslint(build, "mathjax", "ts/mathjax", files.clone())?; - build.add( + build.add_action( "check:typescript:mathjax", TypescriptCheck { tsconfig: "ts/mathjax/tsconfig.json".into(), @@ -576,9 +595,9 @@ pub fn copy_mathjax() -> impl BuildAction { } fn build_sass(build: &mut Build) -> Result<()> { - build.add_inputs_to_group("sass", inputs![glob!("sass/**")]); + build.add_dependency("sass", inputs![glob!("sass/**")]); - build.add( + build.add_action( "css:_root-vars", CompileSass { input: inputs!["sass/_root-vars.scss"], diff --git a/build/ninja_gen/src/archives.rs b/build/ninja_gen/src/archives.rs index 50815b75f..70b40e797 100644 --- a/build/ninja_gen/src/archives.rs +++ b/build/ninja_gen/src/archives.rs @@ -160,7 +160,7 @@ where fn build_archive_tool(build: &mut Build) -> Result<()> { build.once_only("build_archive_tool", |build| { let features = Platform::tls_feature(); - build.add( + build.add_action( "build:archives", CargoBuild { inputs: inputs![glob!("build/archives/**/*")], @@ -186,10 +186,10 @@ where I::Item: AsRef, { let download_group = format!("download:{group_name}"); - build.add(&download_group, DownloadArchive { archive })?; + build.add_action(&download_group, DownloadArchive { archive })?; let extract_group = format!("extract:{group_name}"); - build.add( + build.add_action( extract_group, ExtractArchive { archive_path: inputs![format!(":{download_group}")], diff --git a/build/ninja_gen/src/build.rs b/build/ninja_gen/src/build.rs index dc88c1e6f..bac5a8b9c 100644 --- a/build/ninja_gen/src/build.rs +++ b/build/ninja_gen/src/build.rs @@ -49,7 +49,7 @@ impl Build { groups: Default::default(), }; - build.add("build:run_configure", ConfigureBuild {})?; + build.add_action("build:run_configure", ConfigureBuild {})?; Ok(build) } @@ -76,7 +76,7 @@ impl Build { } } - pub fn add(&mut self, group: impl AsRef, action: impl BuildAction) -> Result<()> { + pub fn add_action(&mut self, group: impl AsRef, action: impl BuildAction) -> Result<()> { let group = group.as_ref(); let groups = split_groups(group); let group = groups[0]; @@ -104,7 +104,7 @@ impl Build { BuildStatement::from_build_action(group, action, &self.groups, self.release); if first_invocation { - let command = statement.prepare_command(command); + let command = statement.prepare_command(command)?; writeln!( &mut self.output_text, "\ @@ -130,8 +130,9 @@ rule {action_name} Ok(()) } - /// Add one or more resolved files to a group. - pub fn add_resolved_files_to_group<'a>( + /// Add one or more resolved files to a group. Does not add to the parent + /// groups; that must be done by the caller. 
+ fn add_resolved_files_to_group<'a>( &mut self, group: &str, files: impl IntoIterator, @@ -140,17 +141,15 @@ rule {action_name} buf.extend(files.into_iter().map(ToString::to_string)); } - pub fn add_inputs_to_group(&mut self, group: &str, inputs: BuildInput) { - self.add_resolved_files_to_group(group, &self.expand_inputs(inputs)); - } - - /// Group names should not have a leading `:`. - pub fn add_group_to_group(&mut self, target_group: &str, additional_group: &str) { - let additional_files = self - .groups - .get(additional_group) - .unwrap_or_else(|| panic!("{additional_group} had no files")); - self.add_resolved_files_to_group(target_group, &additional_files.clone()) + /// Allows you to add dependencies on files or build steps that aren't + /// required to build the group itself, but are required by consumers of + /// that group. + pub fn add_dependency(&mut self, group: &str, deps: BuildInput) { + let files = self.expand_inputs(deps); + let groups = split_groups(group); + for group in groups { + self.add_resolved_files_to_group(group, &files); + } } /// Outputs from a given build statement group. An error if no files have @@ -215,6 +214,7 @@ struct BuildStatement<'a> { output_stamp: bool, env_vars: Vec, working_dir: Option, + create_dirs: Vec, release: bool, bypass_runner: bool, } @@ -239,6 +239,7 @@ impl BuildStatement<'_> { output_stamp: false, env_vars: Default::default(), working_dir: None, + create_dirs: Default::default(), release, bypass_runner: action.bypass_runner(), }; @@ -281,28 +282,29 @@ impl BuildStatement<'_> { (outputs_vec, self.output_subsets) } - fn prepare_command(&mut self, command: String) -> String { + fn prepare_command(&mut self, command: String) -> Result { if self.bypass_runner { - return command; + return Ok(command); } if command.starts_with("$runner") { self.implicit_inputs.push("$runner".into()); - return command; + return Ok(command); } let mut buf = String::from("$runner run "); if self.output_stamp { - write!(&mut buf, "--stamp=$stamp ").unwrap(); + write!(&mut buf, "--stamp=$stamp ")?; } - if !self.env_vars.is_empty() { - for var in &self.env_vars { - write!(&mut buf, "--env={var} ").unwrap(); - } + for var in &self.env_vars { + write!(&mut buf, "--env=\"{var}\" ")?; + } + for dir in &self.create_dirs { + write!(&mut buf, "--mkdir={dir} ")?; } if let Some(working_dir) = &self.working_dir { - write!(&mut buf, "--cwd={working_dir} ").unwrap(); + write!(&mut buf, "--cwd={working_dir} ")?; } buf.push_str(&command); - buf + Ok(buf) } } @@ -370,6 +372,10 @@ pub trait FilesHandle { /// for each command, `constant_value` should reference a `$variable` you /// have defined. fn set_working_dir(&mut self, constant_value: &str); + /// Ensure provided folder and parent folders are created before running + /// the command. Can be called multiple times. Defines a variable pointing + /// at the folder. 
+ fn create_dir_all(&mut self, key: &str, path: impl Into); fn release_build(&self) -> bool; } @@ -462,6 +468,12 @@ impl FilesHandle for BuildStatement<'_> { fn set_working_dir(&mut self, constant_value: &str) { self.working_dir = Some(constant_value.to_owned()); } + + fn create_dir_all(&mut self, key: &str, path: impl Into) { + let path = path.into(); + self.add_variable(key, &path); + self.create_dirs.push(path); + } } fn to_ninja_target_string(explicit: &[String], implicit: &[String]) -> String { diff --git a/build/ninja_gen/src/cargo.rs b/build/ninja_gen/src/cargo.rs index a782155b4..42e48ab12 100644 --- a/build/ninja_gen/src/cargo.rs +++ b/build/ninja_gen/src/cargo.rs @@ -137,7 +137,7 @@ impl BuildAction for CargoTest { } fn on_first_instance(&self, build: &mut Build) -> Result<()> { - build.add( + build.add_action( "cargo-nextest", CargoInstall { binary_name: "cargo-nextest", diff --git a/build/ninja_gen/src/configure.rs b/build/ninja_gen/src/configure.rs index 97f9aabed..50f55722b 100644 --- a/build/ninja_gen/src/configure.rs +++ b/build/ninja_gen/src/configure.rs @@ -25,7 +25,7 @@ impl BuildAction for ConfigureBuild { } fn on_first_instance(&self, build: &mut Build) -> Result<()> { - build.add( + build.add_action( "build:configure", CargoBuild { inputs: inputs![glob!["build/**/*"]], diff --git a/build/ninja_gen/src/node.rs b/build/ninja_gen/src/node.rs index acf9a0804..0b64ce086 100644 --- a/build/ninja_gen/src/node.rs +++ b/build/ninja_gen/src/node.rs @@ -4,6 +4,8 @@ use std::borrow::Cow; use std::collections::HashMap; +use itertools::Itertools; + use super::*; use crate::action::BuildAction; use crate::archives::download_and_extract; @@ -135,10 +137,10 @@ pub fn setup_node( Utf8Path::new(&path).is_absolute(), "YARN_BINARY must be absolute" ); - build.add_resolved_files_to_group("yarn:bin", &vec![path]); + build.add_dependency("yarn:bin", inputs![path]); } Err(_) => { - build.add("yarn", YarnSetup {})?; + build.add_action("yarn", YarnSetup {})?; } }; @@ -148,7 +150,7 @@ pub fn setup_node( vec![format!(".bin/{}", with_cmd_ext(binary)).into()], ); } - build.add( + build.add_action( "node_modules", YarnInstall { package_json_and_lock: inputs!["yarn.lock", "package.json"], @@ -326,30 +328,60 @@ impl BuildAction for SqlFormat { } } -pub struct GenTypescriptProto { +pub struct GenTypescriptProto<'a> { pub protos: BuildInput, - /// .js and .d.ts will be added to it - pub output_stem: &'static str, + pub include_dirs: &'a [&'a str], + /// Automatically created. + pub out_dir: &'a str, + /// Can be used to adjust the output js/dts files to point to out_dir. + pub out_path_transform: fn(&str) -> String, + /// Script to apply modifications to the generated files. 
+ pub py_transform_script: &'static str, } -impl BuildAction for GenTypescriptProto { +impl BuildAction for GenTypescriptProto<'_> { fn command(&self) -> &str { - "$pbjs --target=static-module --wrap=default --force-number --force-message --out=$static $in && $ - $pbjs --target=json-module --wrap=default --force-number --force-message --out=$js $in && $ - $pbts --out=$dts $static && $ - rm $static" + "$protoc $includes $in \ + --plugin $gen-es --es_out $out_dir && \ + $pyenv_bin $script $out_dir" } fn files(&mut self, build: &mut impl build::FilesHandle) { - build.add_inputs("pbjs", inputs![":node_modules:pbjs"]); - build.add_inputs("pbts", inputs![":node_modules:pbts"]); - build.add_inputs("in", &self.protos); - build.add_inputs("", inputs!["yarn.lock"]); + let proto_files = build.expand_inputs(&self.protos); + let output_files: Vec<_> = proto_files + .iter() + .flat_map(|f| { + let js_path = f.replace(".proto", "_pb.js"); + let dts_path = f.replace(".proto", "_pb.d.ts"); + [ + (self.out_path_transform)(&js_path), + (self.out_path_transform)(&dts_path), + ] + }) + .collect(); - let stem = self.output_stem; - build.add_variable("static", format!("$builddir/{stem}_static.js")); - build.add_outputs("js", vec![format!("{stem}.js")]); - build.add_outputs("dts", vec![format!("{stem}.d.ts")]); + build.create_dir_all("out_dir", self.out_dir); + build.add_variable( + "includes", + self.include_dirs + .iter() + .map(|d| format!("-I {d}")) + .join(" "), + ); + build.add_inputs("protoc", inputs![":extract:protoc:bin"]); + build.add_inputs("gen-es", inputs![":node_modules:protoc-gen-es"]); + if cfg!(windows) { + build.add_env_var( + "PATH", + &format!("node_modules/.bin;{}", std::env::var("PATH").unwrap()), + ); + } + build.add_inputs_vec("in", proto_files); + build.add_inputs("", inputs!["yarn.lock"]); + build.add_inputs("pyenv_bin", inputs![":pyenv:bin"]); + build.add_inputs("script", inputs![self.py_transform_script]); + + build.add_outputs("", output_files); } } @@ -376,3 +408,43 @@ impl BuildAction for CompileSass<'_> { build.add_outputs("out", vec![self.output]); } } + +/// Usually we rely on esbuild to transpile our .ts files on the fly, but when +/// we want generated code to be able to import a .ts file, we need to use +/// typescript to generate .js/.d.ts files, or types can't be looked up, and +/// esbuild can't find the file to bundle. +pub struct CompileTypescript<'a> { + pub ts_files: BuildInput, + /// Automatically created. + pub out_dir: &'a str, + /// Can be used to adjust the output js/dts files to point to out_dir. 
+ pub out_path_transform: fn(&str) -> String, +} + +impl BuildAction for CompileTypescript<'_> { + fn command(&self) -> &str { + "$tsc $in --outDir $out_dir -d --skipLibCheck" + } + + fn files(&mut self, build: &mut impl build::FilesHandle) { + build.add_inputs("tsc", inputs![":node_modules:tsc"]); + build.add_inputs("in", &self.ts_files); + build.add_inputs("", inputs!["yarn.lock"]); + + let ts_files = build.expand_inputs(&self.ts_files); + let output_files: Vec<_> = ts_files + .iter() + .flat_map(|f| { + let js_path = f.replace(".ts", ".js"); + let dts_path = f.replace(".ts", ".d.ts"); + [ + (self.out_path_transform)(&js_path), + (self.out_path_transform)(&dts_path), + ] + }) + .collect(); + + build.create_dir_all("out_dir", self.out_dir); + build.add_outputs("", output_files); + } +} diff --git a/build/ninja_gen/src/python.rs b/build/ninja_gen/src/python.rs index 736812ffe..e95346054 100644 --- a/build/ninja_gen/src/python.rs +++ b/build/ninja_gen/src/python.rs @@ -178,7 +178,7 @@ impl BuildAction for PythonFormat<'_> { pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Result<()> { let isort_ini = &inputs![".isort.cfg"]; - build.add( + build.add_action( &format!("check:format:python:{group}"), PythonFormat { inputs: &inputs, @@ -187,7 +187,7 @@ pub fn python_format(build: &mut Build, group: &str, inputs: BuildInput) -> Resu }, )?; - build.add( + build.add_action( &format!("format:python:{group}"), PythonFormat { inputs: &inputs, diff --git a/build/ninja_gen/src/sass.rs b/build/ninja_gen/src/sass.rs index d35ecc6c0..a91a60e19 100644 --- a/build/ninja_gen/src/sass.rs +++ b/build/ninja_gen/src/sass.rs @@ -32,7 +32,7 @@ impl BuildAction for CompileSassWithGrass { } fn on_first_instance(&self, build: &mut Build) -> Result<()> { - build.add( + build.add_action( "grass", CargoInstall { binary_name: "grass", diff --git a/build/runner/Cargo.toml b/build/runner/Cargo.toml index e4b459f98..e52aeb93f 100644 --- a/build/runner/Cargo.toml +++ b/build/runner/Cargo.toml @@ -9,8 +9,11 @@ license.workspace = true rust-version.workspace = true [dependencies] +anki_io = { version = "0.0.0", path = "../../rslib/io" } +anyhow = "1.0.71" camino = "1.1.4" clap = { version = "4.2.1", features = ["derive"] } +itertools = "0.10.5" junction = "1.0.0" termcolor = "1.2.0" workspace-hack = { version = "0.1", path = "../../tools/workspace-hack" } diff --git a/build/runner/src/main.rs b/build/runner/src/main.rs index 63635f261..d044af95a 100644 --- a/build/runner/src/main.rs +++ b/build/runner/src/main.rs @@ -13,8 +13,7 @@ mod rsync; mod run; mod yarn; -use std::error::Error; - +use anyhow::Result; use build::run_build; use build::BuildArgs; use bundle::artifacts::build_artifacts; @@ -33,8 +32,6 @@ use run::RunArgs; use yarn::setup_yarn; use yarn::YarnArgs; -pub type Result> = std::result::Result; - #[derive(Parser)] struct Cli { #[command(subcommand)] @@ -53,10 +50,10 @@ enum Command { BuildDistFolder(BuildDistFolderArgs), } -fn main() { +fn main() -> Result<()> { match Cli::parse().command { Command::Pyenv(args) => setup_pyenv(args), - Command::Run(args) => run_commands(args), + Command::Run(args) => run_commands(args)?, Command::Rsync(args) => rsync_files(args), Command::Yarn(args) => setup_yarn(args), Command::Build(args) => run_build(args), @@ -64,4 +61,5 @@ fn main() { Command::BuildBundleBinary => build_bundle_binary(), Command::BuildDistFolder(args) => build_dist_folder(args), }; + Ok(()) } diff --git a/build/runner/src/run.rs b/build/runner/src/run.rs index e9fcc3f31..24825c820 100644 
--- a/build/runner/src/run.rs +++ b/build/runner/src/run.rs @@ -5,6 +5,9 @@ use std::io::ErrorKind; use std::process::Command; use std::process::Output; +use anki_io::create_dir_all; +use anki_io::write_file; +use anyhow::Result; use clap::Args; #[derive(Args)] @@ -15,20 +18,26 @@ pub struct RunArgs { env: Vec<(String, String)>, #[arg(long)] cwd: Option, + #[arg(long)] + mkdir: Vec, #[arg(trailing_var_arg = true)] args: Vec, } /// Run one or more commands separated by `&&`, optionally stamping or setting /// extra env vars. -pub fn run_commands(args: RunArgs) { +pub fn run_commands(args: RunArgs) -> Result<()> { let commands = split_args(args.args); + for dir in args.mkdir { + create_dir_all(&dir)?; + } for command in commands { run_silent(&mut build_command(command, &args.env, &args.cwd)); } if let Some(stamp_file) = args.stamp { - std::fs::write(stamp_file, b"").expect("unable to write stamp file"); + write_file(stamp_file, b"")?; } + Ok(()) } fn split_env(s: &str) -> Result<(String, String), std::io::Error> { diff --git a/docs/protobuf.md b/docs/protobuf.md index bae6125b5..f83c9fd4f 100644 --- a/docs/protobuf.md +++ b/docs/protobuf.md @@ -100,13 +100,9 @@ Protobuf has an official Python implementation with an extensive [reference](htt ### Typescript -Anki uses [protobuf.js](https://protobufjs.github.io/protobuf.js/), which offers +Anki uses [protobuf-es](https://github.com/bufbuild/protobuf-es), which offers some documentation. -- If using a message `Foo` as a type, make sure not to use the generated interface - `IFoo` instead. Their definitions are very similar, but the interface requires - null checks for every field. - ### Rust Anki uses the [prost crate](https://docs.rs/prost/latest/prost/). diff --git a/package.json b/package.json index 14d2454e7..36885f5da 100644 --- a/package.json +++ b/package.json @@ -6,6 +6,7 @@ "license": "AGPL-3.0-or-later", "description": "Anki JS support files", "devDependencies": { + "@bufbuild/protoc-gen-es": "^1.2.1", "@pyoner/svelte-types": "^3.4.4-2", "@sqltools/formatter": "^1.2.2", "@types/bootstrap": "^5.0.12", @@ -21,47 +22,34 @@ "@typescript-eslint/eslint-plugin": "^4.22.0", "@typescript-eslint/parser": "^4.22.0", "caniuse-lite": "^1.0.30001431", - "chalk": "^4.1.0", "cross-env": "^7.0.2", "diff": "^5.0.0", "dprint": "^0.32.2", "esbuild": "^0.15.13", "esbuild-sass-plugin": "2", "esbuild-svelte": "^0.7.1", - "escodegen": "^2.0.0", "eslint": "^7.24.0", "eslint-plugin-compat": "^3.13.0", "eslint-plugin-import": "^2.25.4", "eslint-plugin-simple-import-sort": "^7.0.0", "eslint-plugin-svelte3": "^3.4.0", - "espree": "^9.0.0", - "estraverse": "^5.2.0", - "glob": "^7.1.6", "jest-cli": "^28.0.0-alpha.5", "jest-environment-jsdom": "^28.0.0-alpha.5", "license-checker-rseidelsohn": "^2.1.1", - "minimist": "^1.2.5", - "patch-package": "^6.4.7", "prettier": "2.4.1", "prettier-plugin-svelte": "2.6.0", - "protobufjs-cli": "^1.0.2", "sass": "1.43.5", - "semver": "^7.3.4", "svelte": "^3.25.0", "svelte-check": "^2.2.6", "svelte-preprocess": "^5.0.3", "svelte-preprocess-esbuild": "^3.0.1", "svelte2tsx": "^0.4.6", - "tmp": "^0.2.1", "tslib": "^2.0.3", "tsx": "^3.12.0", - "typescript": "^5.0.4", - "uglify-js": "^3.13.1" - }, - "scripts": { - "postinstall": "patch-package --patch-dir ts/patches" + "typescript": "^5.0.4" }, "dependencies": { + "@bufbuild/protobuf": "^1.2.1", "@floating-ui/dom": "^0.3.0", "@fluent/bundle": "^0.17.0", "@mdi/svg": "^7.0.96", @@ -83,13 +71,9 @@ "lodash-es": "^4.17.21", "marked": "^4.0.0", "mathjax": "^3.1.2", - "panzoom": "^9.4.3", - 
"protobufjs": "^7" + "panzoom": "^9.4.3" }, "resolutions": { - "jsdoc/marked": "^4.0.0", - "jsdoc/markdown-it": "^12.3.2", - "protobufjs": "^7", "sass": "=1.45.0", "caniuse-lite": "^1.0.30001431" }, diff --git a/proto/anki/image_occlusion.proto b/proto/anki/image_occlusion.proto index 34dcaa580..2e4032780 100644 --- a/proto/anki/image_occlusion.proto +++ b/proto/anki/image_occlusion.proto @@ -7,9 +7,7 @@ option java_multiple_files = true; package anki.image_occlusion; -import "anki/cards.proto"; import "anki/collection.proto"; -import "anki/notes.proto"; import "anki/generic.proto"; service ImageOcclusionService { diff --git a/proto/anki/scheduler.proto b/proto/anki/scheduler.proto index 9aea1069b..ac78654ff 100644 --- a/proto/anki/scheduler.proto +++ b/proto/anki/scheduler.proto @@ -38,6 +38,13 @@ service SchedulerService { rpc SortCards(SortCardsRequest) returns (collection.OpChangesWithCount); rpc SortDeck(SortDeckRequest) returns (collection.OpChangesWithCount); rpc GetSchedulingStates(cards.CardId) returns (SchedulingStates); + // This should be implemented by the frontend, and should return the values + // from the reviewer. The backend method will throw an error. + rpc GetSchedulingStatesWithContext(generic.Empty) + returns (SchedulingStatesWithContext); + // This should be implemented by the frontend, and should update the state + // data in the reviewer. The backend method will throw an error. + rpc SetSchedulingStates(SetSchedulingStatesRequest) returns (generic.Empty); rpc DescribeNextStates(SchedulingStates) returns (generic.StringList); rpc StateIsLeech(SchedulingState) returns (generic.Bool); rpc UpgradeScheduler(generic.Empty) returns (generic.Empty); @@ -67,7 +74,7 @@ message SchedulingState { Learning learning = 2; } message Normal { - oneof value { + oneof kind { New new = 1; Learning learning = 2; Review review = 3; @@ -82,13 +89,13 @@ message SchedulingState { Normal original_state = 1; } message Filtered { - oneof value { + oneof kind { Preview preview = 1; ReschedulingFilter rescheduling = 2; } } - oneof value { + oneof kind { Normal normal = 1; Filtered filtered = 2; } @@ -318,3 +325,8 @@ message RepositionDefaultsResponse { bool random = 1; bool shift = 2; } + +message SetSchedulingStatesRequest { + string key = 1; + SchedulingStates states = 2; +} diff --git a/pylib/anki/scheduler/v3.py b/pylib/anki/scheduler/v3.py index 38af43a90..b3de4faf0 100644 --- a/pylib/anki/scheduler/v3.py +++ b/pylib/anki/scheduler/v3.py @@ -32,6 +32,7 @@ SchedulingState = scheduler_pb2.SchedulingState SchedulingStates = scheduler_pb2.SchedulingStates SchedulingContext = scheduler_pb2.SchedulingContext SchedulingStatesWithContext = scheduler_pb2.SchedulingStatesWithContext +SetSchedulingStatesRequest = scheduler_pb2.SetSchedulingStatesRequest CardAnswer = scheduler_pb2.CardAnswer @@ -182,7 +183,7 @@ class Scheduler(SchedulerBaseWithLegacy): # fixme: move these into tests_schedv2 in the future def _interval_for_state(self, state: scheduler_pb2.SchedulingState) -> int: - kind = state.WhichOneof("value") + kind = state.WhichOneof("kind") if kind == "normal": return self._interval_for_normal_state(state.normal) elif kind == "filtered": @@ -194,7 +195,7 @@ class Scheduler(SchedulerBaseWithLegacy): def _interval_for_normal_state( self, normal: scheduler_pb2.SchedulingState.Normal ) -> int: - kind = normal.WhichOneof("value") + kind = normal.WhichOneof("kind") if kind == "new": return 0 elif kind == "review": @@ -210,7 +211,7 @@ class Scheduler(SchedulerBaseWithLegacy): def 
_interval_for_filtered_state( self, filtered: scheduler_pb2.SchedulingState.Filtered ) -> int: - kind = filtered.WhichOneof("value") + kind = filtered.WhichOneof("kind") if kind == "preview": return filtered.preview.scheduled_secs elif kind == "rescheduling": diff --git a/pylib/tools/markpure.py b/pylib/tools/markpure.py new file mode 100644 index 000000000..cdf1f809f --- /dev/null +++ b/pylib/tools/markpure.py @@ -0,0 +1,28 @@ +# Copyright: Ankitects Pty Ltd and contributors +# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html + +import os +import re +import sys + +root = sys.argv[1] + +type_re = re.compile(r'(make(Enum|MessageType))\(\n\s+".*",') +for dirpath, dirnames, filenames in os.walk(root): + for filename in filenames: + if filename.endswith(".js"): + file = os.path.join(dirpath, filename) + with open(file, "r", encoding="utf8") as f: + contents = f.read() + + # allow tree shaking on proto messages + contents = contents.replace( + "= proto3.make", "= /* @__PURE__ */ proto3.make" + ) + # strip out typeName info, which appears to only be required for + # certain JSON functionality (though this only saves a few hundred + # bytes) + contents = type_re.sub('\\1("",', contents) + + with open(file, "w", encoding="utf8") as f: + f.write(contents) diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py index 92f20a328..f034c0792 100644 --- a/qt/aqt/mediasrv.py +++ b/qt/aqt/mediasrv.py @@ -27,8 +27,7 @@ import aqt.operations from anki import hooks from anki.collection import OpChanges from anki.decks import UpdateDeckConfigs -from anki.scheduler.v3 import SchedulingStatesWithContext -from anki.scheduler_pb2 import SchedulingStates +from anki.scheduler.v3 import SchedulingStatesWithContext, SetSchedulingStatesRequest from anki.utils import dev_mode from aqt.changenotetype import ChangeNotetypeDialog from aqt.deckoptions import DeckOptionsDialog @@ -416,10 +415,9 @@ def get_scheduling_states_with_context() -> bytes: def set_scheduling_states() -> bytes: - key = request.headers.get("key", "") - states = SchedulingStates() + states = SetSchedulingStatesRequest() states.ParseFromString(request.data) - aqt.mw.reviewer.set_scheduling_states(key, states) + aqt.mw.reviewer.set_scheduling_states(states) return b"" diff --git a/qt/aqt/reviewer.py b/qt/aqt/reviewer.py index 31c5ec1b5..89f1d05de 100644 --- a/qt/aqt/reviewer.py +++ b/qt/aqt/reviewer.py @@ -9,7 +9,7 @@ import random import re from dataclasses import dataclass from enum import Enum, auto -from typing import Any, Callable, Literal, Match, Sequence, cast +from typing import Any, Literal, Match, Sequence, cast import aqt import aqt.browser @@ -20,7 +20,11 @@ from anki.collection import Config, OpChanges, OpChangesWithCount from anki.scheduler.base import ScheduleCardsAsNew from anki.scheduler.v3 import CardAnswer, QueuedCards from anki.scheduler.v3 import Scheduler as V3Scheduler -from anki.scheduler.v3 import SchedulingContext, SchedulingStates +from anki.scheduler.v3 import ( + SchedulingContext, + SchedulingStates, + SetSchedulingStatesRequest, +) from anki.tags import MARKED_TAG from anki.types import assert_exhaustive from aqt import AnkiQt, gui_hooks @@ -276,12 +280,12 @@ class Reviewer: return v3.context return None - def set_scheduling_states(self, key: str, states: SchedulingStates) -> None: - if key != self._state_mutation_key: + def set_scheduling_states(self, request: SetSchedulingStatesRequest) -> None: + if request.key != self._state_mutation_key: return if v3 := self._v3: - v3.states = states + v3.states 
diff --git a/qt/aqt/mediasrv.py b/qt/aqt/mediasrv.py
index 92f20a328..f034c0792 100644
--- a/qt/aqt/mediasrv.py
+++ b/qt/aqt/mediasrv.py
@@ -27,8 +27,7 @@ import aqt.operations
 from anki import hooks
 from anki.collection import OpChanges
 from anki.decks import UpdateDeckConfigs
-from anki.scheduler.v3 import SchedulingStatesWithContext
-from anki.scheduler_pb2 import SchedulingStates
+from anki.scheduler.v3 import SchedulingStatesWithContext, SetSchedulingStatesRequest
 from anki.utils import dev_mode
 from aqt.changenotetype import ChangeNotetypeDialog
 from aqt.deckoptions import DeckOptionsDialog
@@ -416,10 +415,9 @@ def get_scheduling_states_with_context() -> bytes:
 
 
 def set_scheduling_states() -> bytes:
-    key = request.headers.get("key", "")
-    states = SchedulingStates()
+    states = SetSchedulingStatesRequest()
     states.ParseFromString(request.data)
-    aqt.mw.reviewer.set_scheduling_states(key, states)
+    aqt.mw.reviewer.set_scheduling_states(states)
     return b""
 
 
diff --git a/qt/aqt/reviewer.py b/qt/aqt/reviewer.py
index 31c5ec1b5..89f1d05de 100644
--- a/qt/aqt/reviewer.py
+++ b/qt/aqt/reviewer.py
@@ -9,7 +9,7 @@ import random
 import re
 from dataclasses import dataclass
 from enum import Enum, auto
-from typing import Any, Callable, Literal, Match, Sequence, cast
+from typing import Any, Literal, Match, Sequence, cast
 
 import aqt
 import aqt.browser
@@ -20,7 +20,11 @@ from anki.collection import Config, OpChanges, OpChangesWithCount
 from anki.scheduler.base import ScheduleCardsAsNew
 from anki.scheduler.v3 import CardAnswer, QueuedCards
 from anki.scheduler.v3 import Scheduler as V3Scheduler
-from anki.scheduler.v3 import SchedulingContext, SchedulingStates
+from anki.scheduler.v3 import (
+    SchedulingContext,
+    SchedulingStates,
+    SetSchedulingStatesRequest,
+)
 from anki.tags import MARKED_TAG
 from anki.types import assert_exhaustive
 from aqt import AnkiQt, gui_hooks
@@ -276,12 +280,12 @@ class Reviewer:
             return v3.context
         return None
 
-    def set_scheduling_states(self, key: str, states: SchedulingStates) -> None:
-        if key != self._state_mutation_key:
+    def set_scheduling_states(self, request: SetSchedulingStatesRequest) -> None:
+        if request.key != self._state_mutation_key:
             return
 
         if v3 := self._v3:
-            v3.states = states
+            v3.states = request.states
 
     def _run_state_mutation_hook(self) -> None:
         def on_eval(result: Any) -> None:
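
On the frontend, the mutation key now travels inside the request message rather than in a custom header. A minimal sketch of the calling side, assuming the generated scheduler service module described later in this patch, and the `key`/`states` fields implied by the Python code above:

    // Sketch only; module paths are assumptions.
    import { setSchedulingStates } from "@tslib/anki/scheduler_service";
    import type { SchedulingStates } from "@tslib/anki/scheduler_pb";

    async function applyMutatedStates(
        key: string,
        states: SchedulingStates,
    ): Promise<void> {
        // If `key` does not match Reviewer._state_mutation_key,
        // the Python reviewer silently ignores the update.
        await setSchedulingStates({ key, states });
    }
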
diff --git a/rslib/proto/build.rs b/rslib/proto/build.rs
index c5cb66abe..f4141084a 100644
--- a/rslib/proto/build.rs
+++ b/rslib/proto/build.rs
@@ -3,6 +3,8 @@
 pub mod python;
 pub mod rust;
+pub mod ts;
+pub mod utils;
 
 use std::env;
 use std::path::PathBuf;
@@ -15,5 +17,7 @@ fn main() -> Result<()> {
     let pool = rust::write_backend_proto_rs(&descriptors_path)?;
     python::write_python_interface(&pool)?;
+    ts::write_ts_interface(&pool)?;
+
     Ok(())
 }
diff --git a/rslib/proto/ts.rs b/rslib/proto/ts.rs
new file mode 100644
index 000000000..46540c27f
--- /dev/null
+++ b/rslib/proto/ts.rs
@@ -0,0 +1,204 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::collections::HashSet;
+use std::fmt::Write as WriteFmt;
+use std::io::BufWriter;
+use std::io::Write;
+use std::path::Path;
+
+use anki_io::create_dir_all;
+use anki_io::create_file;
+use anyhow::Result;
+use inflections::Inflect;
+use prost_reflect::DescriptorPool;
+use prost_reflect::MethodDescriptor;
+use prost_reflect::ServiceDescriptor;
+
+use crate::utils::Comments;
+
+pub(crate) fn write_ts_interface(pool: &DescriptorPool) -> Result<()> {
+    let root = Path::new("../../out/ts/lib/anki");
+    create_dir_all(root)?;
+
+    for service in pool.services() {
+        if service.name() == "AnkidroidService" {
+            continue;
+        }
+        let service_name = service.name().replace("Service", "").to_snake_case();
+        let comments = Comments::from_file(service.parent_file().file_descriptor_proto());
+
+        write_dts_file(root, &service_name, &service, &comments)?;
+        write_js_file(root, &service_name, &service, &comments)?;
+    }
+
+    Ok(())
+}
+
+fn write_dts_file(
+    root: &Path,
+    service_name: &str,
+    service: &ServiceDescriptor,
+    comments: &Comments,
+) -> Result<()> {
+    let output_path = root.join(format!("{service_name}_service.d.ts"));
+    let mut out = BufWriter::new(create_file(output_path)?);
+    write_dts_header(&mut out)?;
+
+    let mut referenced_packages = HashSet::new();
+    let mut method_text = String::new();
+    for method in service.methods() {
+        let method = MethodDetails::from_descriptor(&method, comments);
+        record_referenced_type(&mut referenced_packages, &method.input_type)?;
+        record_referenced_type(&mut referenced_packages, &method.output_type)?;
+        write_dts_method(&method, &mut method_text)?;
+    }
+
+    write_imports(referenced_packages, &mut out)?;
+    write!(out, "{}", method_text)?;
+    Ok(())
+}
+
+fn write_dts_header(out: &mut impl std::io::Write) -> Result<()> {
+    out.write_all(
+        br#"// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html
+
+import type { PlainMessage } from "@bufbuild/protobuf";
+import type { PostProtoOptions } from "../post";
+"#,
+    )?;
+    Ok(())
+}
+
+fn write_imports(referenced_packages: HashSet<String>, out: &mut impl Write) -> Result<()> {
+    for package in referenced_packages {
+        writeln!(
+            out,
+            "import * as {} from \"./{}_pb\";",
+            package,
+            package.to_snake_case()
+        )?;
+    }
+    Ok(())
+}
+
+fn write_dts_method(
+    MethodDetails {
+        method_name,
+        input_type,
+        output_type,
+        comments,
+    }: &MethodDetails,
+    out: &mut String,
+) -> Result<()> {
+    let comments = format_comments(comments);
+    writeln!(
+        out,
+        r#"{comments}export declare function {method_name}(input: PlainMessage<{input_type}>, options?: PostProtoOptions): Promise<{output_type}>;"#
+    )?;
+    Ok(())
+}
+
+fn write_js_file(
+    root: &Path,
+    service_name: &str,
+    service: &ServiceDescriptor,
+    comments: &Comments,
+) -> Result<()> {
+    let output_path = root.join(format!("{service_name}_service.js"));
+    let mut out = BufWriter::new(create_file(output_path)?);
+    write_js_header(&mut out)?;
+
+    let mut referenced_packages = HashSet::new();
+    let mut method_text = String::new();
+    for method in service.methods() {
+        let method = MethodDetails::from_descriptor(&method, comments);
+        record_referenced_type(&mut referenced_packages, &method.input_type)?;
+        record_referenced_type(&mut referenced_packages, &method.output_type)?;
+        write_js_method(&method, &mut method_text)?;
+    }
+
+    write_imports(referenced_packages, &mut out)?;
+    write!(out, "{}", method_text)?;
+    Ok(())
+}
+
+fn write_js_header(out: &mut impl std::io::Write) -> Result<()> {
+    out.write_all(
+        br#"// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html
+
+import { postProto } from "../post";
+"#,
+    )?;
+    Ok(())
+}
+
+fn write_js_method(
+    MethodDetails {
+        method_name,
+        input_type,
+        output_type,
+        ..
+    }: &MethodDetails,
+    out: &mut String,
+) -> Result<()> {
+    write!(
+        out,
+        r#"export async function {method_name}(input, options = {{}}) {{
+    return await postProto("{method_name}", new {input_type}(input), {output_type}, options);
+}}
+"#
+    )?;
+    Ok(())
+}
+
+fn format_comments(comments: &Option<String>) -> String {
+    comments
+        .as_ref()
+        .map(|s| format!("/** {s} */\n"))
+        .unwrap_or_default()
+}
+
+struct MethodDetails {
+    method_name: String,
+    input_type: String,
+    output_type: String,
+    comments: Option<String>,
+}
+
+impl MethodDetails {
+    fn from_descriptor(method: &MethodDescriptor, comments: &Comments) -> MethodDetails {
+        let name = method.name().to_camel_case();
+        let input_type = full_name_to_imported_reference(method.input().full_name());
+        let output_type = full_name_to_imported_reference(method.output().full_name());
+        let comments = comments.get_for_path(method.path());
+        Self {
+            method_name: name,
+            input_type,
+            output_type,
+            comments: comments.map(ToString::to_string),
+        }
+    }
+}
+
+fn record_referenced_type(
+    referenced_packages: &mut HashSet<String>,
+    type_name: &str,
+) -> Result<()> {
+    referenced_packages.insert(type_name.split('.').next().unwrap().to_string());
+    Ok(())
+}
+
+// e.g. anki.import_export.ImportResponse ->
+// importExport.ImportResponse
+fn full_name_to_imported_reference(name: &str) -> String {
+    let mut name = name.splitn(3, '.');
+    name.next().unwrap();
+    format!(
+        "{}.{}",
+        name.next().unwrap().to_camel_case(),
+        name.next().unwrap()
+    )
+}
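
Given the format strings above, the generated pair for a service is short and tree-shakeable. For the set_scheduling_states RPC added elsewhere in this patch, the emitted files would look roughly like this (a sketch rather than verbatim output; the .d.ts declaration is actually emitted on a single line):

    // scheduler_service.d.ts
    import type { PlainMessage } from "@bufbuild/protobuf";
    import type { PostProtoOptions } from "../post";
    import * as scheduler from "./scheduler_pb";
    import * as generic from "./generic_pb";

    export declare function setSchedulingStates(
        input: PlainMessage<scheduler.SetSchedulingStatesRequest>,
        options?: PostProtoOptions,
    ): Promise<generic.Empty>;

    // scheduler_service.js
    import { postProto } from "../post";
    import * as scheduler from "./scheduler_pb";
    import * as generic from "./generic_pb";

    export async function setSchedulingStates(input, options = {}) {
        return await postProto(
            "setSchedulingStates",
            new scheduler.SetSchedulingStatesRequest(input),
            generic.Empty,
            options,
        );
    }
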
diff --git a/rslib/proto/utils.rs b/rslib/proto/utils.rs
new file mode 100644
index 000000000..91c622c70
--- /dev/null
+++ b/rslib/proto/utils.rs
@@ -0,0 +1,45 @@
+// Copyright: Ankitects Pty Ltd and contributors
+// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
+
+use std::collections::HashMap;
+
+use prost_types::FileDescriptorProto;
+
+#[derive(Debug)]
+pub struct Comments {
+    path_map: HashMap<Vec<i32>, String>,
+}
+
+impl Comments {
+    pub fn from_file(file: &FileDescriptorProto) -> Self {
+        Self {
+            path_map: file
+                .source_code_info
+                .as_ref()
+                .unwrap()
+                .location
+                .iter()
+                .map(|l| {
+                    (
+                        l.path.clone(),
+                        format!(
+                            "{}{}",
+                            l.leading_detached_comments.join("\n").trim(),
+                            l.leading_comments().trim()
+                        ),
+                    )
+                })
+                .collect(),
+        }
+    }
+
+    pub fn get_for_path(&self, path: &[i32]) -> Option<&str> {
+        self.path_map.get(path).map(|s| s.as_str()).and_then(|s| {
+            if s.is_empty() {
+                None
+            } else {
+                Some(s)
+            }
+        })
+    }
+}
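
Comments are keyed by descriptor source-code-info paths, which address elements positionally: the field number of the container alternates with an index, so a path like [6, 0, 2, 1] means method #1 (ServiceDescriptorProto field 2) of service #0 (FileDescriptorProto field 6). `method.path()` in ts.rs yields exactly such a path. The same lookup expressed in TypeScript, for illustration only (the comment text is invented):

    const comments = new Map<string, string>([
        ["6.0.2.1", "Set scheduling states for the current card."],
    ]);

    function getForPath(path: number[]): string | undefined {
        const text = comments.get(path.join("."));
        // Like Comments::get_for_path, treat an empty string as "no comment".
        return text ? text : undefined;
    }
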
diff --git a/rslib/src/backend/scheduler/mod.rs b/rslib/src/backend/scheduler/mod.rs
index 58cbb77f5..2a3d8afc6 100644
--- a/rslib/src/backend/scheduler/mod.rs
+++ b/rslib/src/backend/scheduler/mod.rs
@@ -5,8 +5,11 @@ mod answering;
 mod states;
 
 use anki_proto::generic;
+use anki_proto::generic::Empty;
 use anki_proto::scheduler;
 pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService;
+use anki_proto::scheduler::SchedulingStatesWithContext;
+use anki_proto::scheduler::SetSchedulingStatesRequest;
 
 use super::Backend;
 use crate::prelude::*;
@@ -264,4 +267,18 @@ impl SchedulerService for Backend {
     ) -> Result<scheduler::CustomStudyDefaultsResponse> {
         self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
     }
+
+    fn get_scheduling_states_with_context(
+        &self,
+        _input: Empty,
+    ) -> std::result::Result<SchedulingStatesWithContext, Self::Error> {
+        invalid_input!("the frontend should implement this")
+    }
+
+    fn set_scheduling_states(
+        &self,
+        _input: SetSchedulingStatesRequest,
+    ) -> std::result::Result<Empty, Self::Error> {
+        invalid_input!("the frontend should implement this")
+    }
 }
diff --git a/rslib/src/backend/scheduler/states/filtered.rs b/rslib/src/backend/scheduler/states/filtered.rs
index a47baf722..26cfb4ae0 100644
--- a/rslib/src/backend/scheduler/states/filtered.rs
+++ b/rslib/src/backend/scheduler/states/filtered.rs
@@ -6,12 +6,12 @@ use crate::scheduler::states::FilteredState;
 impl From<FilteredState> for anki_proto::scheduler::scheduling_state::Filtered {
     fn from(state: FilteredState) -> Self {
         anki_proto::scheduler::scheduling_state::Filtered {
-            value: Some(match state {
+            kind: Some(match state {
                 FilteredState::Preview(state) => {
-                    anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state.into())
+                    anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(state.into())
                 }
                 FilteredState::Rescheduling(state) => {
-                    anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(
+                    anki_proto::scheduler::scheduling_state::filtered::Kind::Rescheduling(
                         state.into(),
                     )
                 }
@@ -22,13 +22,13 @@ impl From<FilteredState> for anki_proto::scheduler::scheduling_state::Filtered {
 
 impl From<anki_proto::scheduler::scheduling_state::Filtered> for FilteredState {
     fn from(state: anki_proto::scheduler::scheduling_state::Filtered) -> Self {
-        match state.value.unwrap_or_else(|| {
-            anki_proto::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
+        match state.kind.unwrap_or_else(|| {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(Default::default())
         }) {
-            anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state) => {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(state) => {
                 FilteredState::Preview(state.into())
             }
-            anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Rescheduling(state) => {
                 FilteredState::Rescheduling(state.into())
             }
         }
diff --git a/rslib/src/backend/scheduler/states/mod.rs b/rslib/src/backend/scheduler/states/mod.rs
index f02c2fa69..ba03c1fa1 100644
--- a/rslib/src/backend/scheduler/states/mod.rs
+++ b/rslib/src/backend/scheduler/states/mod.rs
@@ -42,12 +42,12 @@ impl From<anki_proto::scheduler::SchedulingStates> for SchedulingStates {
 impl From<CardState> for anki_proto::scheduler::SchedulingState {
     fn from(state: CardState) -> Self {
         anki_proto::scheduler::SchedulingState {
-            value: Some(match state {
+            kind: Some(match state {
                 CardState::Normal(state) => {
-                    anki_proto::scheduler::scheduling_state::Value::Normal(state.into())
+                    anki_proto::scheduler::scheduling_state::Kind::Normal(state.into())
                 }
                 CardState::Filtered(state) => {
-                    anki_proto::scheduler::scheduling_state::Value::Filtered(state.into())
+                    anki_proto::scheduler::scheduling_state::Kind::Filtered(state.into())
                 }
             }),
             custom_data: None,
@@ -57,12 +57,12 @@ impl From<CardState> for anki_proto::scheduler::SchedulingState {
 
 impl From<anki_proto::scheduler::SchedulingState> for CardState {
     fn from(state: anki_proto::scheduler::SchedulingState) -> Self {
-        if let Some(value) = state.value {
+        if let Some(value) = state.kind {
             match value {
-                anki_proto::scheduler::scheduling_state::Value::Normal(normal) => {
+                anki_proto::scheduler::scheduling_state::Kind::Normal(normal) => {
                     CardState::Normal(normal.into())
                 }
-                anki_proto::scheduler::scheduling_state::Value::Filtered(filtered) => {
+                anki_proto::scheduler::scheduling_state::Kind::Filtered(filtered) => {
                     CardState::Filtered(filtered.into())
                 }
             }
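
The value -> kind rename also matters on the TypeScript side, because protobuf-es represents a oneof as a discriminated { case, value } pair. A sketch of what consuming code sees (import path assumed; names follow the protos above):

    import { SchedulingState } from "@tslib/anki/scheduler_pb";

    const state = new SchedulingState();
    switch (state.kind.case) {
        case "normal":
            // state.kind.value is the nested Normal message
            break;
        case "filtered":
            // state.kind.value is the nested Filtered message
            break;
    }
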
diff --git a/rslib/src/backend/scheduler/states/normal.rs b/rslib/src/backend/scheduler/states/normal.rs
index 1c8595da3..4b932e389 100644
--- a/rslib/src/backend/scheduler/states/normal.rs
+++ b/rslib/src/backend/scheduler/states/normal.rs
@@ -6,18 +6,18 @@ use crate::scheduler::states::NormalState;
 impl From<NormalState> for anki_proto::scheduler::scheduling_state::Normal {
     fn from(state: NormalState) -> Self {
         anki_proto::scheduler::scheduling_state::Normal {
-            value: Some(match state {
+            kind: Some(match state {
                 NormalState::New(state) => {
-                    anki_proto::scheduler::scheduling_state::normal::Value::New(state.into())
+                    anki_proto::scheduler::scheduling_state::normal::Kind::New(state.into())
                 }
                 NormalState::Learning(state) => {
-                    anki_proto::scheduler::scheduling_state::normal::Value::Learning(state.into())
+                    anki_proto::scheduler::scheduling_state::normal::Kind::Learning(state.into())
                 }
                 NormalState::Review(state) => {
-                    anki_proto::scheduler::scheduling_state::normal::Value::Review(state.into())
+                    anki_proto::scheduler::scheduling_state::normal::Kind::Review(state.into())
                 }
                 NormalState::Relearning(state) => {
-                    anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state.into())
+                    anki_proto::scheduler::scheduling_state::normal::Kind::Relearning(state.into())
                 }
             }),
         }
@@ -26,19 +26,19 @@ impl From<NormalState> for anki_proto::scheduler::scheduling_state::Normal {
 
 impl From<anki_proto::scheduler::scheduling_state::Normal> for NormalState {
     fn from(state: anki_proto::scheduler::scheduling_state::Normal) -> Self {
-        match state.value.unwrap_or_else(|| {
-            anki_proto::scheduler::scheduling_state::normal::Value::New(Default::default())
+        match state.kind.unwrap_or_else(|| {
+            anki_proto::scheduler::scheduling_state::normal::Kind::New(Default::default())
         }) {
-            anki_proto::scheduler::scheduling_state::normal::Value::New(state) => {
+            anki_proto::scheduler::scheduling_state::normal::Kind::New(state) => {
                 NormalState::New(state.into())
            }
-            anki_proto::scheduler::scheduling_state::normal::Value::Learning(state) => {
+            anki_proto::scheduler::scheduling_state::normal::Kind::Learning(state) => {
                 NormalState::Learning(state.into())
             }
-            anki_proto::scheduler::scheduling_state::normal::Value::Review(state) => {
+            anki_proto::scheduler::scheduling_state::normal::Kind::Review(state) => {
                 NormalState::Review(state.into())
             }
-            anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state) => {
+            anki_proto::scheduler::scheduling_state::normal::Kind::Relearning(state) => {
                 NormalState::Relearning(state.into())
             }
         }
diff --git a/sass/base.scss b/sass/base.scss
index 14358483a..dcdba3219 100644
--- a/sass/base.scss
+++ b/sass/base.scss
@@ -78,11 +78,6 @@ samp {
     unicode-bidi: normal !important;
 }
 
-.reduce-motion * {
-    transition: none !important;
-    animation: none !important;
-}
-
 label,
 input[type="radio"],
 input[type="checkbox"] {
diff --git a/tools/ts-run b/tools/ts-run
index 9e36e6096..25bc53c9c 100755
--- a/tools/ts-run
+++ b/tools/ts-run
@@ -3,4 +3,4 @@
 # The pages can be accessed by, eg surfing to
 # http://localhost:40000/_anki/pages/deckconfig.html
 
-QTWEBENGINE_REMOTE_DEBUGGING=8080 ANKI_API_PORT=40000 SOURCEMAP=1 ./run $*
+QTWEBENGINE_REMOTE_DEBUGGING=8080 ANKI_API_PORT=40000 ./run $*
diff --git a/ts/bundle_svelte.mjs b/ts/bundle_svelte.mjs
index 6c7522c57..61f67acf7 100644
--- a/ts/bundle_svelte.mjs
+++ b/ts/bundle_svelte.mjs
@@ -18,7 +18,7 @@ if (page_html != null) {
 }
 
 // support Qt 5.14
-const target = ["es6", "chrome77"];
+const target = ["es2020", "chrome77"];
 const inlineCss = bundle_css == null;
 const sourcemap = env.SOURCEMAP && true;
 let sveltePlugins;
diff --git a/ts/card-info/CardInfo.svelte b/ts/card-info/CardInfo.svelte
index bb24a90be..4ac7e5f44 100644
--- a/ts/card-info/CardInfo.svelte
+++ b/ts/card-info/CardInfo.svelte
@@ -3,8 +3,11 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 [...]
diff --git a/ts/congrats/CongratsPage.svelte b/ts/congrats/CongratsPage.svelte
index 60f51f95d..1b5912c48 100644
--- a/ts/congrats/CongratsPage.svelte
+++ b/ts/congrats/CongratsPage.svelte
@@ -3,15 +3,15 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 [...]
diff --git a/ts/graphs/CardCounts.svelte b/ts/graphs/CardCounts.svelte
index 5ba56f455..9f39e9fa8 100644
--- a/ts/graphs/CardCounts.svelte
+++ b/ts/graphs/CardCounts.svelte
@@ -3,22 +3,21 @@ Copyright: Ankitects Pty Ltd and contributors
 License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
 -->
 [...]
 {#if controller}
 [...]
 {/if}
[...]
-    {#if sourceData && preferences && revlogRange}
+    {#if sourceData && revlogRange}
         {#each graphs as graph}
[...]