Migrate to protobuf-es (#2547)
* Fix .no-reduce-motion missing from graphs spinner, and not being honored
* Begin migration from protobuf.js -> protobuf-es
Motivation:
- Protobuf-es has a nicer API: messages are represented as classes, and
fields that should exist are not typed as nullable (see the sketch after
this list).
- As it uses modules, only the proto messages we actually use get included
in our bundle output. Protobuf.js put everything in a namespace, which
prevented tree-shaking and made it awkward to access inner messages.
- ./run after touching a proto file drops from about 8s to 6s on my machine. The tradeoff
is slower decoding/encoding (#2043), but that was mainly a concern for the
graphs page, and was unblocked by
37151213cd
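As a rough illustration of the API difference (not taken from the actual codebase; message and module names are made up for the example), compare how a message is consumed under each library:

    // protobuf.js static module: everything hangs off one generated namespace,
    // and the generated IDeck interface marks every field as possibly null.
    import { anki } from "./backend_proto";
    function render(deck: anki.decks.IDeck): string {
        return deck.name ?? "";     // null check needed on every field
    }

    // protobuf-es: each message is a class exported from its own module, so
    // unused messages can be tree-shaken, and fields are plain values.
    import { Deck } from "./generated/anki/decks_pb";
    function render2(deck: Deck): string {
        return deck.name;           // plain string
    }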
Approach/notes:
- We generate the new protobuf-es interface in addition to existing
protobuf.js interface, so we can migrate a module at a time, starting
with the graphs module.
- rslib:proto now generates RPC methods for TS in addition to the Python
interface. The input-arg-unrolling behaviour of the Python generation is
not required here, as we declare the input arg as a PlainMessage<T>, which
marks it as requiring all fields to be provided.
- i64 is represented as bigint in protobuf-es. We were using a patch to
protobuf.js to get it to output JavaScript numbers instead of long.js
types, but now that our supported browser versions support bigint, it's
probably worth biting the bullet and migrating to bigint (see the sketch
after this list). Our IDs fit comfortably within MAX_SAFE_INTEGER, but
that may not hold for future fields we add.
- Oneofs are handled differently in protobuf-es, and are going to need
some refactoring.
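A rough sketch of the bigint and oneof differences noted above (field and type names are illustrative, not the exact generated code):

    import { PlainMessage } from "@bufbuild/protobuf";
    import { CardId } from "./generated/anki/cards_pb";
    import { SchedulingState } from "./generated/anki/scheduler_pb";

    // i64 fields surface as bigint rather than number; PlainMessage<T>
    // requires all fields to be provided:
    const id: PlainMessage<CardId> = { cid: 123456789n };

    // A oneof becomes a single { case, value } discriminated union, rather
    // than one optional field per variant:
    function describe(state: SchedulingState): string {
        switch (state.kind.case) {
            case "normal":
                return "normal";
            case "filtered":
                return "filtered";
            default:
                return "unset";
        }
    }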
Other notable changes:
- Added a --mkdir arg to our build runner, so we can create a dir easily
during the build on Windows.
- Simplified the preference handling code by wrapping the preferences
in an outer store, instead of a separate store for each individual
preference (see the sketch after this list). This means a change to one
preference will trigger a redraw of all components that depend on the
preference store, but the redrawing is cheap after moving the data
processing to Rust, and it makes the code easier to follow.
- Drop async(Reactive).ts in favour of more explicit handling with await
blocks/updating.
- Renamed add_inputs_to_group() -> add_dependency(), and fixed it not adding
dependencies to parent groups. Renamed add() -> add_action() for clarity.
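A minimal sketch of the preference-store shape described above (the field names are made up; the real code wraps the generated graphs preferences message):

    import { writable, type Writable } from "svelte/store";

    // One store wrapping the whole preferences object, instead of a separate
    // store per preference.
    interface Preferences {
        futureDueShowBacklog: boolean;
        cardCountsSeparateInactive: boolean;
    }

    const prefs: Writable<Preferences> = writable({
        futureDueShowBacklog: true,
        cardCountsSeparateInactive: false,
    });

    // Changing one field notifies every subscriber of `prefs`; the resulting
    // redraw is cheap now that the data processing happens in Rust.
    prefs.update((p) => ({ ...p, futureDueShowBacklog: false }));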
* Remove a couple of unused proto imports
* Migrate card info
* Migrate congrats, image occlusion, and tag editor
+ Fix imports for multi-word proto files.
* Migrate change-notetype
* Migrate deck options
* Bump target to es2020; simplify ts lib list
Used caniuse.com to confirm that Chromium 77, iOS 14.5 and Chrome on
Android support the full set of es2017-es2020 features.
* Migrate import-csv
* Migrate i18n and fix missing output types in .js
* Migrate custom scheduling, and remove protobuf.js
To mostly maintain our old API contract, we make use of protobuf-es's
ability to convert to JSON, which follows the same format as protobuf.js
did. It doesn't cover all cases: users who were previously changing the
variant of a type will need to update their code, as assigning to a new
variant no longer automatically removes the old one, which will cause an
error when we try to convert back from JSON. But I suspect the large
majority of users are adjusting the current variant rather than creating a
new one, and this saves us having to write proxy wrappers, so it seems like
a reasonable compromise.
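A sketch of what that compatibility layer looks like in practice (assuming the generated SchedulingStates message; this is illustrative, not the exact code):

    import { SchedulingStates } from "./generated/anki/scheduler_pb";

    const states = new SchedulingStates();

    // Hand the states to custom scheduling JS in the same JSON shape that
    // protobuf.js produced:
    const json = states.toJson();

    // ...user code mutates `json` in place...

    // Convert back when the user code is done. If the user assigned a new
    // oneof variant without deleting the old one, both variants will be
    // present in the JSON and fromJson() will throw, which is the breaking
    // case described above.
    const updated = SchedulingStates.fromJson(json);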
One other change I made at the same time was to rename value->kind for
the oneofs in our custom study protos, as 'value' was easily confused
with the 'case/value' output that protobuf-es has.
With protobuf.js codegen removed, touching a proto file and invoking
./run drops from about 8s to 6s.
This closes #2043.
* Allow tree-shaking on protobuf types
* Display backend error messages in our ts alert()
* Make sourcemap generation opt-in for ts-run
Sourcemap generation considerably slows down the build, and is not needed
most of the time.
Commit 45f5709214 (parent 7164723a7a)
[Diff summary: the runner's Cargo.toml/Cargo.lock gain anki_io and anyhow; throughout the build/ninja modules (aqt, bundle, proto, pylib, python, rust, web), build.add() is renamed to build.add_action() and add_inputs_to_group() to add_dependency(), and the rslib/i18n and rslib/proto groups become rslib:i18n and rslib:proto; the node tool list drops pbjs/pbts in favour of protoc-gen-es; GenTypescriptProto is reworked to invoke protoc with the protoc-gen-es plugin over proto/**/*.proto, create its output dir, and run a Python post-processing script, and a new CompileTypescript action compiles ts/lib/post.ts so generated _service.js files can import it; Build::add becomes add_action, add_dependency now also adds to parent groups, prepare_command returns Result, and build statements support create_dir_all/--mkdir; the runner's main() returns Result, run_commands creates --mkdir directories via anki_io::create_dir_all and writes stamp files with write_file; docs/protobuf.md now points at protobuf-es instead of protobuf.js; package.json adds @bufbuild/protobuf and @bufbuild/protoc-gen-es and drops protobufjs, protobufjs-cli and the patch-package postinstall step; image_occlusion.proto drops two unused imports; scheduler.proto documents the frontend-implemented GetSchedulingStatesWithContext/SetSchedulingStates RPCs, renames its oneof value fields to kind, and adds a SetSchedulingStatesRequest message; pylib's v3 scheduler exports SetSchedulingStatesRequest and updates its WhichOneof("value") calls to WhichOneof("kind").]
@ -0,0 +1,28 @@ (new file; referenced above as pylib/tools/markpure.py)

# Copyright: Ankitects Pty Ltd and contributors
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import os
import re
import sys

root = sys.argv[1]

type_re = re.compile(r'(make(Enum|MessageType))\(\n\s+".*",')
for dirpath, dirnames, filenames in os.walk(root):
    for filename in filenames:
        if filename.endswith(".js"):
            file = os.path.join(dirpath, filename)
            with open(file, "r", encoding="utf8") as f:
                contents = f.read()

            # allow tree shaking on proto messages
            contents = contents.replace(
                "= proto3.make", "= /* @__PURE__ */ proto3.make"
            )
            # strip out typeName info, which appears to only be required for
            # certain JSON functionality (though this only saves a few hundred
            # bytes)
            contents = type_re.sub('\\1("",', contents)

            with open(file, "w", encoding="utf8") as f:
                f.write(contents)
|
|
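For readers unfamiliar with the annotation, here is a rough before/after of the generated protobuf-es output this script rewrites (the message name and field are illustrative, not the exact generated code):

import { proto3 } from "@bufbuild/protobuf";

// Before: bundlers must assume the call has side effects, so every
// message in the module is retained.
export const CardId = proto3.makeMessageType(
    "anki.cards.CardId",
    () => [{ no: 1, name: "cid", kind: "scalar", T: 3 /* int64 */ }],
);

// After: the call is marked pure and the typeName is blanked, so
// esbuild can tree-shake any message that nothing imports.
export const CardId = /* @__PURE__ */ proto3.makeMessageType("",
    () => [{ no: 1, name: "cid", kind: "scalar", T: 3 /* int64 */ }],
);
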
@@ -27,8 +27,7 @@ import aqt.operations
from anki import hooks
from anki.collection import OpChanges
from anki.decks import UpdateDeckConfigs
-from anki.scheduler.v3 import SchedulingStatesWithContext
-from anki.scheduler_pb2 import SchedulingStates
+from anki.scheduler.v3 import SchedulingStatesWithContext, SetSchedulingStatesRequest
from anki.utils import dev_mode
from aqt.changenotetype import ChangeNotetypeDialog
from aqt.deckoptions import DeckOptionsDialog
@@ -416,10 +415,9 @@ def get_scheduling_states_with_context() -> bytes:


def set_scheduling_states() -> bytes:
-    key = request.headers.get("key", "")
-    states = SchedulingStates()
+    states = SetSchedulingStatesRequest()
    states.ParseFromString(request.data)
-    aqt.mw.reviewer.set_scheduling_states(key, states)
+    aqt.mw.reviewer.set_scheduling_states(states)
    return b""

@@ -9,7 +9,7 @@ import random
import re
from dataclasses import dataclass
from enum import Enum, auto
-from typing import Any, Callable, Literal, Match, Sequence, cast
+from typing import Any, Literal, Match, Sequence, cast

import aqt
import aqt.browser
@@ -20,7 +20,11 @@ from anki.collection import Config, OpChanges, OpChangesWithCount
from anki.scheduler.base import ScheduleCardsAsNew
from anki.scheduler.v3 import CardAnswer, QueuedCards
from anki.scheduler.v3 import Scheduler as V3Scheduler
-from anki.scheduler.v3 import SchedulingContext, SchedulingStates
+from anki.scheduler.v3 import (
+    SchedulingContext,
+    SchedulingStates,
+    SetSchedulingStatesRequest,
+)
from anki.tags import MARKED_TAG
from anki.types import assert_exhaustive
from aqt import AnkiQt, gui_hooks
@@ -276,12 +280,12 @@ class Reviewer:
            return v3.context
        return None

-    def set_scheduling_states(self, key: str, states: SchedulingStates) -> None:
-        if key != self._state_mutation_key:
+    def set_scheduling_states(self, request: SetSchedulingStatesRequest) -> None:
+        if request.key != self._state_mutation_key:
            return

        if v3 := self._v3:
-            v3.states = states
+            v3.states = request.states

    def _run_state_mutation_hook(self) -> None:
        def on_eval(result: Any) -> None:

@@ -3,6 +3,8 @@

pub mod python;
pub mod rust;
+pub mod ts;
+pub mod utils;

use std::env;
use std::path::PathBuf;
@@ -15,5 +17,7 @@ fn main() -> Result<()> {

    let pool = rust::write_backend_proto_rs(&descriptors_path)?;
    python::write_python_interface(&pool)?;
+    ts::write_ts_interface(&pool)?;

    Ok(())
}

@ -0,0 +1,204 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::fmt::Write as WriteFmt;
|
||||
use std::io::BufWriter;
|
||||
use std::io::Write;
|
||||
use std::path::Path;
|
||||
|
||||
use anki_io::create_dir_all;
|
||||
use anki_io::create_file;
|
||||
use anyhow::Result;
|
||||
use inflections::Inflect;
|
||||
use prost_reflect::DescriptorPool;
|
||||
use prost_reflect::MethodDescriptor;
|
||||
use prost_reflect::ServiceDescriptor;
|
||||
|
||||
use crate::utils::Comments;
|
||||
|
||||
pub(crate) fn write_ts_interface(pool: &DescriptorPool) -> Result<()> {
|
||||
let root = Path::new("../../out/ts/lib/anki");
|
||||
create_dir_all(root)?;
|
||||
|
||||
for service in pool.services() {
|
||||
if service.name() == "AnkidroidService" {
|
||||
continue;
|
||||
}
|
||||
let service_name = service.name().replace("Service", "").to_snake_case();
|
||||
let comments = Comments::from_file(service.parent_file().file_descriptor_proto());
|
||||
|
||||
write_dts_file(root, &service_name, &service, &comments)?;
|
||||
write_js_file(root, &service_name, &service, &comments)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_dts_file(
|
||||
root: &Path,
|
||||
service_name: &str,
|
||||
service: &ServiceDescriptor,
|
||||
comments: &Comments,
|
||||
) -> Result<()> {
|
||||
let output_path = root.join(format!("{service_name}_service.d.ts"));
|
||||
let mut out = BufWriter::new(create_file(output_path)?);
|
||||
write_dts_header(&mut out)?;
|
||||
|
||||
let mut referenced_packages = HashSet::new();
|
||||
let mut method_text = String::new();
|
||||
for method in service.methods() {
|
||||
let method = MethodDetails::from_descriptor(&method, comments);
|
||||
record_referenced_type(&mut referenced_packages, &method.input_type)?;
|
||||
record_referenced_type(&mut referenced_packages, &method.output_type)?;
|
||||
write_dts_method(&method, &mut method_text)?;
|
||||
}
|
||||
|
||||
write_imports(referenced_packages, &mut out)?;
|
||||
write!(out, "{}", method_text)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_dts_header(out: &mut impl std::io::Write) -> Result<()> {
|
||||
out.write_all(
|
||||
br#"// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { PlainMessage } from "@bufbuild/protobuf";
|
||||
import type { PostProtoOptions } from "../post";
|
||||
"#,
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_imports(referenced_packages: HashSet<String>, out: &mut impl Write) -> Result<()> {
|
||||
for package in referenced_packages {
|
||||
writeln!(
|
||||
out,
|
||||
"import * as {} from \"./{}_pb\";",
|
||||
package,
|
||||
package.to_snake_case()
|
||||
)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_dts_method(
|
||||
MethodDetails {
|
||||
method_name,
|
||||
input_type,
|
||||
output_type,
|
||||
comments,
|
||||
}: &MethodDetails,
|
||||
out: &mut String,
|
||||
) -> Result<()> {
|
||||
let comments = format_comments(comments);
|
||||
writeln!(
|
||||
out,
|
||||
r#"{comments}export declare function {method_name}(input: PlainMessage<{input_type}>, options?: PostProtoOptions): Promise<{output_type}>;"#
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_js_file(
|
||||
root: &Path,
|
||||
service_name: &str,
|
||||
service: &ServiceDescriptor,
|
||||
comments: &Comments,
|
||||
) -> Result<()> {
|
||||
let output_path = root.join(format!("{service_name}_service.js"));
|
||||
let mut out = BufWriter::new(create_file(output_path)?);
|
||||
write_js_header(&mut out)?;
|
||||
|
||||
let mut referenced_packages = HashSet::new();
|
||||
let mut method_text = String::new();
|
||||
for method in service.methods() {
|
||||
let method = MethodDetails::from_descriptor(&method, comments);
|
||||
record_referenced_type(&mut referenced_packages, &method.input_type)?;
|
||||
record_referenced_type(&mut referenced_packages, &method.output_type)?;
|
||||
write_js_method(&method, &mut method_text)?;
|
||||
}
|
||||
|
||||
write_imports(referenced_packages, &mut out)?;
|
||||
write!(out, "{}", method_text)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_js_header(out: &mut impl std::io::Write) -> Result<()> {
|
||||
out.write_all(
|
||||
br#"// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; https://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import { postProto } from "../post";
|
||||
"#,
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn write_js_method(
|
||||
MethodDetails {
|
||||
method_name,
|
||||
input_type,
|
||||
output_type,
|
||||
..
|
||||
}: &MethodDetails,
|
||||
out: &mut String,
|
||||
) -> Result<()> {
|
||||
write!(
|
||||
out,
|
||||
r#"export async function {method_name}(input, options = {{}}) {{
|
||||
return await postProto("{method_name}", new {input_type}(input), {output_type}, options);
|
||||
}}
|
||||
"#
|
||||
)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn format_comments(comments: &Option<String>) -> String {
|
||||
comments
|
||||
.as_ref()
|
||||
.map(|s| format!("/** {s} */\n"))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
struct MethodDetails {
|
||||
method_name: String,
|
||||
input_type: String,
|
||||
output_type: String,
|
||||
comments: Option<String>,
|
||||
}
|
||||
|
||||
impl MethodDetails {
|
||||
fn from_descriptor(method: &MethodDescriptor, comments: &Comments) -> MethodDetails {
|
||||
let name = method.name().to_camel_case();
|
||||
let input_type = full_name_to_imported_reference(method.input().full_name());
|
||||
let output_type = full_name_to_imported_reference(method.output().full_name());
|
||||
let comments = comments.get_for_path(method.path());
|
||||
Self {
|
||||
method_name: name,
|
||||
input_type,
|
||||
output_type,
|
||||
comments: comments.map(ToString::to_string),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn record_referenced_type(
|
||||
referenced_packages: &mut HashSet<String>,
|
||||
type_name: &str,
|
||||
) -> Result<()> {
|
||||
referenced_packages.insert(type_name.split('.').next().unwrap().to_string());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// e.g. anki.import_export.ImportResponse ->
|
||||
// importExport.ImportResponse
|
||||
fn full_name_to_imported_reference(name: &str) -> String {
|
||||
let mut name = name.splitn(3, '.');
|
||||
name.next().unwrap();
|
||||
format!(
|
||||
"{}.{}",
|
||||
name.next().unwrap().to_camel_case(),
|
||||
name.next().unwrap()
|
||||
)
|
||||
}
|
|
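To make the TypeScript generator above concrete, this is roughly the shape of the `.d.ts`/`.js` pair it emits for one scheduler method, sketched from the templates in the generator; the exact generated text may differ slightly:

// scheduler_service.d.ts (sketch)
import type { PlainMessage } from "@bufbuild/protobuf";
import type { PostProtoOptions } from "../post";
import * as generic from "./generic_pb";
import * as scheduler from "./scheduler_pb";

export declare function congratsInfo(
    input: PlainMessage<generic.Empty>,
    options?: PostProtoOptions,
): Promise<scheduler.CongratsInfoResponse>;

// scheduler_service.js (sketch)
import { postProto } from "../post";
import * as generic from "./generic_pb";
import * as scheduler from "./scheduler_pb";

export async function congratsInfo(input, options = {}) {
    return await postProto("congratsInfo", new generic.Empty(input), scheduler.CongratsInfoResponse, options);
}
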
@ -0,0 +1,45 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use prost_types::FileDescriptorProto;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Comments {
|
||||
path_map: HashMap<Vec<i32>, String>,
|
||||
}
|
||||
|
||||
impl Comments {
|
||||
pub fn from_file(file: &FileDescriptorProto) -> Self {
|
||||
Self {
|
||||
path_map: file
|
||||
.source_code_info
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.location
|
||||
.iter()
|
||||
.map(|l| {
|
||||
(
|
||||
l.path.clone(),
|
||||
format!(
|
||||
"{}{}",
|
||||
l.leading_detached_comments.join("\n").trim(),
|
||||
l.leading_comments().trim()
|
||||
),
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_for_path(&self, path: &[i32]) -> Option<&str> {
|
||||
self.path_map.get(path).map(|s| s.as_str()).and_then(|s| {
|
||||
if s.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(s)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
|
@@ -5,8 +5,11 @@ mod answering;
mod states;

use anki_proto::generic;
+use anki_proto::generic::Empty;
use anki_proto::scheduler;
pub(super) use anki_proto::scheduler::scheduler_service::Service as SchedulerService;
+use anki_proto::scheduler::SchedulingStatesWithContext;
+use anki_proto::scheduler::SetSchedulingStatesRequest;

use super::Backend;
use crate::prelude::*;
@@ -264,4 +267,18 @@ impl SchedulerService for Backend {
    ) -> Result<scheduler::CustomStudyDefaultsResponse> {
        self.with_col(|col| col.custom_study_defaults(input.deck_id.into()))
    }
+
+    fn get_scheduling_states_with_context(
+        &self,
+        _input: Empty,
+    ) -> std::result::Result<SchedulingStatesWithContext, Self::Error> {
+        invalid_input!("the frontend should implement this")
+    }
+
+    fn set_scheduling_states(
+        &self,
+        _input: SetSchedulingStatesRequest,
+    ) -> std::result::Result<Empty, Self::Error> {
+        invalid_input!("the frontend should implement this")
+    }
}

@@ -6,12 +6,12 @@ use crate::scheduler::states::FilteredState;
impl From<FilteredState> for anki_proto::scheduler::scheduling_state::Filtered {
    fn from(state: FilteredState) -> Self {
        anki_proto::scheduler::scheduling_state::Filtered {
-            value: Some(match state {
+            kind: Some(match state {
                FilteredState::Preview(state) => {
-                    anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state.into())
+                    anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(state.into())
                }
                FilteredState::Rescheduling(state) => {
-                    anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(
+                    anki_proto::scheduler::scheduling_state::filtered::Kind::Rescheduling(
                        state.into(),
                    )
                }
@@ -22,13 +22,13 @@ impl From<FilteredState> for anki_proto::scheduler::scheduling_state::Filtered {

impl From<anki_proto::scheduler::scheduling_state::Filtered> for FilteredState {
    fn from(state: anki_proto::scheduler::scheduling_state::Filtered) -> Self {
-        match state.value.unwrap_or_else(|| {
-            anki_proto::scheduler::scheduling_state::filtered::Value::Preview(Default::default())
+        match state.kind.unwrap_or_else(|| {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(Default::default())
        }) {
-            anki_proto::scheduler::scheduling_state::filtered::Value::Preview(state) => {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Preview(state) => {
                FilteredState::Preview(state.into())
            }
-            anki_proto::scheduler::scheduling_state::filtered::Value::Rescheduling(state) => {
+            anki_proto::scheduler::scheduling_state::filtered::Kind::Rescheduling(state) => {
                FilteredState::Rescheduling(state.into())
            }
        }

@@ -42,12 +42,12 @@ impl From<anki_proto::scheduler::SchedulingStates> for SchedulingStates {
impl From<CardState> for anki_proto::scheduler::SchedulingState {
    fn from(state: CardState) -> Self {
        anki_proto::scheduler::SchedulingState {
-            value: Some(match state {
+            kind: Some(match state {
                CardState::Normal(state) => {
-                    anki_proto::scheduler::scheduling_state::Value::Normal(state.into())
+                    anki_proto::scheduler::scheduling_state::Kind::Normal(state.into())
                }
                CardState::Filtered(state) => {
-                    anki_proto::scheduler::scheduling_state::Value::Filtered(state.into())
+                    anki_proto::scheduler::scheduling_state::Kind::Filtered(state.into())
                }
            }),
            custom_data: None,
@@ -57,12 +57,12 @@ impl From<CardState> for anki_proto::scheduler::SchedulingState {

impl From<anki_proto::scheduler::SchedulingState> for CardState {
    fn from(state: anki_proto::scheduler::SchedulingState) -> Self {
-        if let Some(value) = state.value {
+        if let Some(value) = state.kind {
            match value {
-                anki_proto::scheduler::scheduling_state::Value::Normal(normal) => {
+                anki_proto::scheduler::scheduling_state::Kind::Normal(normal) => {
                    CardState::Normal(normal.into())
                }
-                anki_proto::scheduler::scheduling_state::Value::Filtered(filtered) => {
+                anki_proto::scheduler::scheduling_state::Kind::Filtered(filtered) => {
                    CardState::Filtered(filtered.into())
                }
            }

@ -6,18 +6,18 @@ use crate::scheduler::states::NormalState;
|
|||
impl From<NormalState> for anki_proto::scheduler::scheduling_state::Normal {
|
||||
fn from(state: NormalState) -> Self {
|
||||
anki_proto::scheduler::scheduling_state::Normal {
|
||||
value: Some(match state {
|
||||
kind: Some(match state {
|
||||
NormalState::New(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::New(state.into())
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::New(state.into())
|
||||
}
|
||||
NormalState::Learning(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Learning(state.into())
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Learning(state.into())
|
||||
}
|
||||
NormalState::Review(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Review(state.into())
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Review(state.into())
|
||||
}
|
||||
NormalState::Relearning(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state.into())
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Relearning(state.into())
|
||||
}
|
||||
}),
|
||||
}
|
||||
|
@ -26,19 +26,19 @@ impl From<NormalState> for anki_proto::scheduler::scheduling_state::Normal {
|
|||
|
||||
impl From<anki_proto::scheduler::scheduling_state::Normal> for NormalState {
|
||||
fn from(state: anki_proto::scheduler::scheduling_state::Normal) -> Self {
|
||||
match state.value.unwrap_or_else(|| {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::New(Default::default())
|
||||
match state.kind.unwrap_or_else(|| {
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::New(Default::default())
|
||||
}) {
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::New(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::New(state) => {
|
||||
NormalState::New(state.into())
|
||||
}
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Learning(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Learning(state) => {
|
||||
NormalState::Learning(state.into())
|
||||
}
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Review(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Review(state) => {
|
||||
NormalState::Review(state.into())
|
||||
}
|
||||
anki_proto::scheduler::scheduling_state::normal::Value::Relearning(state) => {
|
||||
anki_proto::scheduler::scheduling_state::normal::Kind::Relearning(state) => {
|
||||
NormalState::Relearning(state.into())
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -78,11 +78,6 @@ samp {
  unicode-bidi: normal !important;
}

-.reduce-motion * {
-  transition: none !important;
-  animation: none !important;
-}
-
label,
input[type="radio"],
input[type="checkbox"] {

@@ -3,4 +3,4 @@
# The pages can be accessed by, eg surfing to
# http://localhost:40000/_anki/pages/deckconfig.html

-QTWEBENGINE_REMOTE_DEBUGGING=8080 ANKI_API_PORT=40000 SOURCEMAP=1 ./run $*
+QTWEBENGINE_REMOTE_DEBUGGING=8080 ANKI_API_PORT=40000 ./run $*

@@ -18,7 +18,7 @@ if (page_html != null) {
}

// support Qt 5.14
-const target = ["es6", "chrome77"];
+const target = ["es2020", "chrome77"];
const inlineCss = bundle_css == null;
const sourcemap = env.SOURCEMAP && true;
let sveltePlugins;

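A minimal illustration (not from the diff) of why the es2020 target matters here: esbuild cannot lower bigint literals to es6, and the migrated IDs rely on them, alongside other es2020 syntax such as nullish coalescing:

// es2020 syntax that the bundle step can now emit as-is.
const notetypeId: bigint = 1623289129847n;
const limits: { newToday?: number } = {};
const newToday = limits.newToday ?? 0;
console.log(notetypeId.toString(), newToday);
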
@@ -3,8 +3,11 @@ Copyright: Ankitects Pty Ltd and contributors
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
-    import type { Stats } from "@tslib/proto";
-    import { Cards, stats as statsService } from "@tslib/proto";
+    import type {
+        CardStatsResponse,
+        CardStatsResponse_StatsRevlogEntry,
+    } from "@tslib/anki/stats_pb";
+    import { cardStats } from "@tslib/anki/stats_service";

    import Container from "../components/Container.svelte";
    import Row from "../components/Row.svelte";
@@ -14,10 +17,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

    export let includeRevlog: boolean = true;

-    let stats: Stats.CardStatsResponse | null = null;
-    let revlog: Stats.CardStatsResponse.StatsRevlogEntry[] | null = null;
+    let stats: CardStatsResponse | null = null;
+    let revlog: CardStatsResponse_StatsRevlogEntry[] | null = null;

-    export async function updateStats(cardId: number | null): Promise<void> {
+    export async function updateStats(cardId: bigint | null): Promise<void> {
        const requestedCardId = cardId;

        if (cardId === null) {
@@ -26,16 +29,14 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
            return;
        }

-        const cardStats = await statsService.cardStats(
-            Cards.CardId.create({ cid: requestedCardId }),
-        );
+        const updatedStats = await cardStats({ cid: cardId });

        /* Skip if another update has been triggered in the meantime. */
        if (requestedCardId === cardId) {
-            stats = cardStats;
+            stats = updatedStats;

            if (includeRevlog) {
-                revlog = stats.revlog as Stats.CardStatsResponse.StatsRevlogEntry[];
+                revlog = stats.revlog;
            }
        }
    }

@ -3,22 +3,22 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { CardStatsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr2 from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { DAY, timeSpan, Timestamp } from "@tslib/time";
|
||||
|
||||
export let stats: Stats.CardStatsResponse;
|
||||
export let stats: CardStatsResponse;
|
||||
|
||||
function dateString(timestamp: number): string {
|
||||
return new Timestamp(timestamp).dateString();
|
||||
function dateString(timestamp: bigint): string {
|
||||
return new Timestamp(Number(timestamp)).dateString();
|
||||
}
|
||||
|
||||
interface StatsRow {
|
||||
label: string;
|
||||
value: string | number;
|
||||
value: string | number | bigint;
|
||||
}
|
||||
|
||||
function rowsFromStats(stats: Stats.CardStatsResponse): StatsRow[] {
|
||||
function rowsFromStats(stats: CardStatsResponse): StatsRow[] {
|
||||
const statsRows: StatsRow[] = [];
|
||||
|
||||
statsRows.push({ label: tr2.cardStatsAdded(), value: dateString(stats.added) });
|
||||
|
|
|
@ -3,42 +3,41 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { CardStatsResponse_StatsRevlogEntry as RevlogEntry } from "@tslib/anki/stats_pb";
|
||||
import { RevlogEntry_ReviewKind as ReviewKind } from "@tslib/anki/stats_pb";
|
||||
import * as tr2 from "@tslib/ftl";
|
||||
import { Stats } from "@tslib/proto";
|
||||
import { timeSpan, Timestamp } from "@tslib/time";
|
||||
|
||||
type StatsRevlogEntry = Stats.CardStatsResponse.StatsRevlogEntry;
|
||||
export let revlog: RevlogEntry[];
|
||||
|
||||
export let revlog: StatsRevlogEntry[];
|
||||
|
||||
function reviewKindClass(entry: StatsRevlogEntry): string {
|
||||
function reviewKindClass(entry: RevlogEntry): string {
|
||||
switch (entry.reviewKind) {
|
||||
case Stats.RevlogEntry.ReviewKind.LEARNING:
|
||||
case ReviewKind.LEARNING:
|
||||
return "revlog-learn";
|
||||
case Stats.RevlogEntry.ReviewKind.REVIEW:
|
||||
case ReviewKind.REVIEW:
|
||||
return "revlog-review";
|
||||
case Stats.RevlogEntry.ReviewKind.RELEARNING:
|
||||
case ReviewKind.RELEARNING:
|
||||
return "revlog-relearn";
|
||||
}
|
||||
return "";
|
||||
}
|
||||
|
||||
function reviewKindLabel(entry: StatsRevlogEntry): string {
|
||||
function reviewKindLabel(entry: RevlogEntry): string {
|
||||
switch (entry.reviewKind) {
|
||||
case Stats.RevlogEntry.ReviewKind.LEARNING:
|
||||
case ReviewKind.LEARNING:
|
||||
return tr2.cardStatsReviewLogTypeLearn();
|
||||
case Stats.RevlogEntry.ReviewKind.REVIEW:
|
||||
case ReviewKind.REVIEW:
|
||||
return tr2.cardStatsReviewLogTypeReview();
|
||||
case Stats.RevlogEntry.ReviewKind.RELEARNING:
|
||||
case ReviewKind.RELEARNING:
|
||||
return tr2.cardStatsReviewLogTypeRelearn();
|
||||
case Stats.RevlogEntry.ReviewKind.FILTERED:
|
||||
case ReviewKind.FILTERED:
|
||||
return tr2.cardStatsReviewLogTypeFiltered();
|
||||
case Stats.RevlogEntry.ReviewKind.MANUAL:
|
||||
case ReviewKind.MANUAL:
|
||||
return tr2.cardStatsReviewLogTypeManual();
|
||||
}
|
||||
}
|
||||
|
||||
function ratingClass(entry: StatsRevlogEntry): string {
|
||||
function ratingClass(entry: RevlogEntry): string {
|
||||
if (entry.buttonChosen === 1) {
|
||||
return "revlog-ease1";
|
||||
}
|
||||
|
@ -57,8 +56,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
takenSecs: string;
|
||||
}
|
||||
|
||||
function revlogRowFromEntry(entry: StatsRevlogEntry): RevlogRow {
|
||||
const timestamp = new Timestamp(entry.time);
|
||||
function revlogRowFromEntry(entry: RevlogEntry): RevlogRow {
|
||||
const timestamp = new Timestamp(Number(entry.time));
|
||||
|
||||
return {
|
||||
date: timestamp.dateString(),
|
||||
|
|
|
@ -26,6 +26,6 @@ if (window.location.hash.startsWith("#test")) {
|
|||
// use #testXXXX where XXXX is card ID to test
|
||||
const cardId = parseInt(window.location.hash.substring(0, "#test".length), 10);
|
||||
setupCardInfo(document.body).then(
|
||||
(cardInfo: CardInfo): Promise<void> => cardInfo.updateStats(cardId),
|
||||
(cardInfo: CardInfo): Promise<void> => cardInfo.updateStats(BigInt(cardId)),
|
||||
);
|
||||
}
|
||||
|
|
|
@ -3,28 +3,26 @@
|
|||
|
||||
import "./change-notetype-base.scss";
|
||||
|
||||
import { getChangeNotetypeInfo, getNotetypeNames } from "@tslib/anki/notetypes_service";
|
||||
import { ModuleName, setupI18n } from "@tslib/i18n";
|
||||
import { checkNightMode } from "@tslib/nightmode";
|
||||
import { empty, Notetypes, notetypes } from "@tslib/proto";
|
||||
|
||||
import ChangeNotetypePage from "./ChangeNotetypePage.svelte";
|
||||
import { ChangeNotetypeState } from "./lib";
|
||||
|
||||
const notetypeNames = notetypes.getNotetypeNames(empty);
|
||||
const notetypeNames = getNotetypeNames({});
|
||||
const i18n = setupI18n({
|
||||
modules: [ModuleName.ACTIONS, ModuleName.CHANGE_NOTETYPE, ModuleName.KEYBOARD],
|
||||
});
|
||||
|
||||
export async function setupChangeNotetypePage(
|
||||
oldNotetypeId: number,
|
||||
newNotetypeId: number,
|
||||
oldNotetypeId: bigint,
|
||||
newNotetypeId: bigint,
|
||||
): Promise<ChangeNotetypePage> {
|
||||
const changeNotetypeInfo = notetypes.getChangeNotetypeInfo(
|
||||
Notetypes.GetChangeNotetypeInfoRequest.create({
|
||||
oldNotetypeId,
|
||||
newNotetypeId,
|
||||
}),
|
||||
);
|
||||
const changeNotetypeInfo = getChangeNotetypeInfo({
|
||||
oldNotetypeId,
|
||||
newNotetypeId,
|
||||
});
|
||||
const [names, info] = await Promise.all([notetypeNames, changeNotetypeInfo, i18n]);
|
||||
|
||||
checkNightMode();
|
||||
|
@ -39,5 +37,5 @@ export async function setupChangeNotetypePage(
|
|||
// use #testXXXX where XXXX is notetype ID to test
|
||||
if (window.location.hash.startsWith("#test")) {
|
||||
const ntid = parseInt(window.location.hash.substring("#test".length), 10);
|
||||
setupChangeNotetypePage(ntid, ntid);
|
||||
setupChangeNotetypePage(BigInt(ntid), BigInt(ntid));
|
||||
}
|
||||
|
|
|
@ -7,8 +7,8 @@
|
|||
|
||||
import "@tslib/i18n";
|
||||
|
||||
import { ChangeNotetypeInfo, NotetypeNames } from "@tslib/anki/notetypes_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { Notetypes } from "@tslib/proto";
|
||||
import { get } from "svelte/store";
|
||||
|
||||
import { ChangeNotetypeState, MapContext, negativeOneToNull } from "./lib";
|
||||
|
@ -16,23 +16,23 @@ import { ChangeNotetypeState, MapContext, negativeOneToNull } from "./lib";
|
|||
const exampleNames = {
|
||||
entries: [
|
||||
{
|
||||
id: "1623289129847",
|
||||
id: 1623289129847n,
|
||||
name: "Basic",
|
||||
},
|
||||
{
|
||||
id: "1623289129848",
|
||||
id: 1623289129848n,
|
||||
name: "Basic (and reversed card)",
|
||||
},
|
||||
{
|
||||
id: "1623289129849",
|
||||
id: 1623289129849n,
|
||||
name: "Basic (optional reversed card)",
|
||||
},
|
||||
{
|
||||
id: "1623289129850",
|
||||
id: 1623289129850n,
|
||||
name: "Basic (type in the answer)",
|
||||
},
|
||||
{
|
||||
id: "1623289129851",
|
||||
id: 1623289129851n,
|
||||
name: "Cloze",
|
||||
},
|
||||
],
|
||||
|
@ -46,9 +46,9 @@ const exampleInfoDifferent = {
|
|||
input: {
|
||||
newFields: [0, 1, -1],
|
||||
newTemplates: [0, -1],
|
||||
oldNotetypeId: "1623289129847",
|
||||
newNotetypeId: "1623289129849",
|
||||
currentSchema: "1623302002316",
|
||||
oldNotetypeId: 1623289129847n,
|
||||
newNotetypeId: 1623289129849n,
|
||||
currentSchema: 1623302002316n,
|
||||
oldNotetypeName: "Basic",
|
||||
},
|
||||
};
|
||||
|
@ -61,24 +61,24 @@ const exampleInfoSame = {
|
|||
input: {
|
||||
newFields: [0, 1],
|
||||
newTemplates: [0],
|
||||
oldNotetypeId: "1623289129847",
|
||||
newNotetypeId: "1623289129847",
|
||||
currentSchema: "1623302002316",
|
||||
oldNotetypeId: 1623289129847n,
|
||||
newNotetypeId: 1623289129847n,
|
||||
currentSchema: 1623302002316n,
|
||||
oldNotetypeName: "Basic",
|
||||
},
|
||||
};
|
||||
|
||||
function differentState(): ChangeNotetypeState {
|
||||
return new ChangeNotetypeState(
|
||||
Notetypes.NotetypeNames.fromObject(exampleNames),
|
||||
Notetypes.ChangeNotetypeInfo.fromObject(exampleInfoDifferent),
|
||||
new NotetypeNames(exampleNames),
|
||||
new ChangeNotetypeInfo(exampleInfoDifferent),
|
||||
);
|
||||
}
|
||||
|
||||
function sameState(): ChangeNotetypeState {
|
||||
return new ChangeNotetypeState(
|
||||
Notetypes.NotetypeNames.fromObject(exampleNames),
|
||||
Notetypes.ChangeNotetypeInfo.fromObject(exampleInfoSame),
|
||||
new NotetypeNames(exampleNames),
|
||||
new ChangeNotetypeInfo(exampleInfoSame),
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -1,8 +1,9 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { ChangeNotetypeInfo, ChangeNotetypeRequest, NotetypeNames } from "@tslib/anki/notetypes_pb";
|
||||
import { changeNotetype, getChangeNotetypeInfo } from "@tslib/anki/notetypes_service";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { Notetypes, notetypes } from "@tslib/proto";
|
||||
import { isEqual } from "lodash-es";
|
||||
import type { Readable } from "svelte/store";
|
||||
import { readable } from "svelte/store";
|
||||
|
@ -21,9 +22,9 @@ export class ChangeNotetypeInfoWrapper {
|
|||
fields: (number | null)[];
|
||||
templates?: (number | null)[];
|
||||
oldNotetypeName: string;
|
||||
readonly info: Notetypes.ChangeNotetypeInfo;
|
||||
readonly info: ChangeNotetypeInfo;
|
||||
|
||||
constructor(info: Notetypes.ChangeNotetypeInfo) {
|
||||
constructor(info: ChangeNotetypeInfo) {
|
||||
this.info = info;
|
||||
const templates = info.input?.newTemplates ?? [];
|
||||
if (templates.length > 0) {
|
||||
|
@ -89,13 +90,13 @@ export class ChangeNotetypeInfoWrapper {
|
|||
);
|
||||
}
|
||||
|
||||
input(): Notetypes.ChangeNotetypeRequest {
|
||||
return this.info.input as Notetypes.ChangeNotetypeRequest;
|
||||
input(): ChangeNotetypeRequest {
|
||||
return this.info.input as ChangeNotetypeRequest;
|
||||
}
|
||||
|
||||
/** Pack changes back into input message for saving. */
|
||||
intoInput(): Notetypes.ChangeNotetypeRequest {
|
||||
const input = this.info.input as Notetypes.ChangeNotetypeRequest;
|
||||
intoInput(): ChangeNotetypeRequest {
|
||||
const input = this.info.input as ChangeNotetypeRequest;
|
||||
input.newFields = nullToNegativeOne(this.fields);
|
||||
if (this.templates) {
|
||||
input.newTemplates = nullToNegativeOne(this.templates);
|
||||
|
@ -122,12 +123,12 @@ export class ChangeNotetypeState {
|
|||
|
||||
private info_: ChangeNotetypeInfoWrapper;
|
||||
private infoSetter!: (val: ChangeNotetypeInfoWrapper) => void;
|
||||
private notetypeNames: Notetypes.NotetypeNames;
|
||||
private notetypeNames: NotetypeNames;
|
||||
private notetypesSetter!: (val: NotetypeListEntry[]) => void;
|
||||
|
||||
constructor(
|
||||
notetypes: Notetypes.NotetypeNames,
|
||||
info: Notetypes.ChangeNotetypeInfo,
|
||||
notetypes: NotetypeNames,
|
||||
info: ChangeNotetypeInfo,
|
||||
) {
|
||||
this.info_ = new ChangeNotetypeInfoWrapper(info);
|
||||
this.info = readable(this.info_, (set) => {
|
||||
|
@ -144,13 +145,10 @@ export class ChangeNotetypeState {
|
|||
this.info_.input().newNotetypeId = this.notetypeNames.entries[idx].id!;
|
||||
this.notetypesSetter(this.buildNotetypeList());
|
||||
const { oldNotetypeId, newNotetypeId } = this.info_.input();
|
||||
const newInfo = await notetypes.getChangeNotetypeInfo(
|
||||
Notetypes.GetChangeNotetypeInfoRequest.create({
|
||||
oldNotetypeId,
|
||||
newNotetypeId,
|
||||
}),
|
||||
);
|
||||
|
||||
const newInfo = await getChangeNotetypeInfo({
|
||||
oldNotetypeId,
|
||||
newNotetypeId,
|
||||
});
|
||||
this.info_ = new ChangeNotetypeInfoWrapper(newInfo);
|
||||
this.info_.unusedItems(MapContext.Field);
|
||||
this.infoSetter(this.info_);
|
||||
|
@ -175,7 +173,7 @@ export class ChangeNotetypeState {
|
|||
this.infoSetter(this.info_);
|
||||
}
|
||||
|
||||
dataForSaving(): Notetypes.ChangeNotetypeRequest {
|
||||
dataForSaving(): ChangeNotetypeRequest {
|
||||
return this.info_.intoInput();
|
||||
}
|
||||
|
||||
|
@ -184,7 +182,7 @@ export class ChangeNotetypeState {
|
|||
alert("No changes to save");
|
||||
return;
|
||||
}
|
||||
await notetypes.changeNotetype(this.dataForSaving());
|
||||
await changeNotetype(this.dataForSaving());
|
||||
}
|
||||
|
||||
private buildNotetypeList(): NotetypeListEntry[] {
|
||||
|
|
|
@ -12,6 +12,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import Popover from "./Popover.svelte";
|
||||
import WithFloating from "./WithFloating.svelte";
|
||||
|
||||
type T = $$Generic;
|
||||
|
||||
export let id: string | undefined = undefined;
|
||||
|
||||
let className = "";
|
||||
|
@ -19,11 +21,11 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
export let disabled = false;
|
||||
export let label = "<br>";
|
||||
export let value = 0;
|
||||
export let value: T;
|
||||
|
||||
const dispatch = createEventDispatcher();
|
||||
|
||||
function setValue(v: number) {
|
||||
function setValue(v: T) {
|
||||
value = v;
|
||||
dispatch("change", { value });
|
||||
}
|
||||
|
|
|
@ -9,8 +9,10 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import { selectKey } from "./context-keys";
|
||||
import DropdownItem from "./DropdownItem.svelte";
|
||||
|
||||
type T = $$Generic;
|
||||
|
||||
export let disabled = false;
|
||||
export let value: number;
|
||||
export let value: T;
|
||||
|
||||
let element: HTMLButtonElement;
|
||||
|
||||
|
@ -40,7 +42,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
}
|
||||
|
||||
const selectContext: Writable<{ value: number; setValue: Function }> =
|
||||
const selectContext: Writable<{ value: T; setValue: Function }> =
|
||||
getContext(selectKey);
|
||||
const setValue = $selectContext.setValue;
|
||||
</script>
|
||||
|
|
|
@ -3,15 +3,15 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { CongratsInfoResponse } from "@tslib/anki/scheduler_pb";
|
||||
import { bridgeLink } from "@tslib/bridgecommand";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Scheduler } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Container from "../components/Container.svelte";
|
||||
import { buildNextLearnMsg } from "./lib";
|
||||
|
||||
export let info: Scheduler.CongratsInfoResponse;
|
||||
export let info: CongratsInfoResponse;
|
||||
|
||||
const congrats = tr.schedulingCongratulationsFinished();
|
||||
let nextLearnMsg: string;
|
||||
|
|
|
@ -3,9 +3,9 @@
|
|||
|
||||
import "./congrats-base.scss";
|
||||
|
||||
import { congratsInfo } from "@tslib/anki/scheduler_service";
|
||||
import { ModuleName, setupI18n } from "@tslib/i18n";
|
||||
import { checkNightMode } from "@tslib/nightmode";
|
||||
import { empty, scheduler } from "@tslib/proto";
|
||||
|
||||
import CongratsPage from "./CongratsPage.svelte";
|
||||
|
||||
|
@ -16,7 +16,7 @@ export async function setupCongrats(): Promise<CongratsPage> {
|
|||
await i18n;
|
||||
|
||||
const customMountPoint = document.getElementById("congrats");
|
||||
const info = await scheduler.congratsInfo(empty);
|
||||
const info = await congratsInfo({});
|
||||
const page = new CongratsPage({
|
||||
// use #congrats if it exists, otherwise entire body
|
||||
target: customMountPoint ?? document.body,
|
||||
|
@ -26,7 +26,7 @@ export async function setupCongrats(): Promise<CongratsPage> {
|
|||
// refresh automatically if a custom area not provided
|
||||
if (!customMountPoint) {
|
||||
setInterval(async () => {
|
||||
const info = await scheduler.congratsInfo(empty);
|
||||
const info = await congratsInfo({});
|
||||
page.$set({ info });
|
||||
}, 60000);
|
||||
}
|
||||
|
|
|
@ -1,11 +1,11 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { CongratsInfoResponse } from "@tslib/anki/scheduler_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Scheduler } from "@tslib/proto";
|
||||
import { naturalUnit, unitAmount, unitName } from "@tslib/time";
|
||||
|
||||
export function buildNextLearnMsg(info: Scheduler.CongratsInfoResponse): string {
|
||||
export function buildNextLearnMsg(info: CongratsInfoResponse): string {
|
||||
const secsUntil = info.secsUntilNextLearn;
|
||||
// next learning card not due today?
|
||||
if (secsUntil >= 86_400) {
|
||||
|
|
|
@ -85,14 +85,14 @@
|
|||
new ValueTab(
|
||||
tr.deckConfigDeckOnly(),
|
||||
$limits.new ?? null,
|
||||
(value) => ($limits.new = value),
|
||||
(value) => ($limits.new = value ?? undefined),
|
||||
null,
|
||||
null,
|
||||
),
|
||||
new ValueTab(
|
||||
tr.deckConfigTodayOnly(),
|
||||
$limits.newTodayActive ? $limits.newToday ?? null : null,
|
||||
(value) => ($limits.newToday = value),
|
||||
(value) => ($limits.newToday = value ?? undefined),
|
||||
null,
|
||||
$limits.newToday ?? null,
|
||||
),
|
||||
|
@ -114,14 +114,14 @@
|
|||
new ValueTab(
|
||||
tr.deckConfigDeckOnly(),
|
||||
$limits.review ?? null,
|
||||
(value) => ($limits.review = value),
|
||||
(value) => ($limits.review = value ?? undefined),
|
||||
null,
|
||||
null,
|
||||
),
|
||||
new ValueTab(
|
||||
tr.deckConfigTodayOnly(),
|
||||
$limits.reviewTodayActive ? $limits.reviewToday ?? null : null,
|
||||
(value) => ($limits.reviewToday = value),
|
||||
(value) => ($limits.reviewToday = value ?? undefined),
|
||||
null,
|
||||
$limits.reviewToday ?? null,
|
||||
),
|
||||
|
|
|
@ -3,9 +3,12 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import {
|
||||
DeckConfig_Config_NewCardGatherPriority as GatherOrder,
|
||||
DeckConfig_Config_NewCardSortOrder as SortOrder,
|
||||
} from "@tslib/anki/deckconfig_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { HelpPage } from "@tslib/help-page";
|
||||
import { DeckConfig } from "@tslib/proto";
|
||||
import type Carousel from "bootstrap/js/dist/carousel";
|
||||
import type Modal from "bootstrap/js/dist/modal";
|
||||
|
||||
|
@ -53,27 +56,25 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
tr.deckConfigSortOrderRandom(),
|
||||
];
|
||||
|
||||
const GatherOrder = DeckConfig.DeckConfig.Config.NewCardGatherPriority;
|
||||
const SortOrder = DeckConfig.DeckConfig.Config.NewCardSortOrder;
|
||||
let disabledNewSortOrders: number[] = [];
|
||||
$: {
|
||||
switch ($config.newCardGatherPriority) {
|
||||
case GatherOrder.NEW_CARD_GATHER_PRIORITY_RANDOM_NOTES:
|
||||
case GatherOrder.RANDOM_NOTES:
|
||||
disabledNewSortOrders = [
|
||||
// same as NEW_CARD_SORT_ORDER_TEMPLATE
|
||||
SortOrder.NEW_CARD_SORT_ORDER_TEMPLATE_THEN_RANDOM,
|
||||
// same as NEW_CARD_SORT_ORDER_NO_SORT
|
||||
SortOrder.NEW_CARD_SORT_ORDER_RANDOM_NOTE_THEN_TEMPLATE,
|
||||
// same as TEMPLATE
|
||||
SortOrder.TEMPLATE_THEN_RANDOM,
|
||||
// same as NO_SORT
|
||||
SortOrder.RANDOM_NOTE_THEN_TEMPLATE,
|
||||
];
|
||||
break;
|
||||
case GatherOrder.NEW_CARD_GATHER_PRIORITY_RANDOM_CARDS:
|
||||
case GatherOrder.RANDOM_CARDS:
|
||||
disabledNewSortOrders = [
|
||||
// same as NEW_CARD_SORT_ORDER_TEMPLATE
|
||||
SortOrder.NEW_CARD_SORT_ORDER_TEMPLATE_THEN_RANDOM,
|
||||
// same as TEMPLATE
|
||||
SortOrder.TEMPLATE_THEN_RANDOM,
|
||||
// not useful if siblings are not gathered together
|
||||
SortOrder.NEW_CARD_SORT_ORDER_RANDOM_NOTE_THEN_TEMPLATE,
|
||||
// same as NEW_CARD_SORT_ORDER_NO_SORT
|
||||
SortOrder.NEW_CARD_SORT_ORDER_RANDOM_CARD,
|
||||
SortOrder.RANDOM_NOTE_THEN_TEMPLATE,
|
||||
// same as NO_SORT
|
||||
SortOrder.RANDOM_CARD,
|
||||
];
|
||||
break;
|
||||
default:
|
||||
|
|
|
@ -3,9 +3,9 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import { DeckConfig_Config_NewCardInsertOrder } from "@tslib/anki/deckconfig_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { HelpPage } from "@tslib/help-page";
|
||||
import { DeckConfig } from "@tslib/proto";
|
||||
import type Carousel from "bootstrap/js/dist/carousel";
|
||||
import type Modal from "bootstrap/js/dist/modal";
|
||||
|
||||
|
@ -50,8 +50,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
|
||||
$: insertionOrderRandom =
|
||||
state.v3Scheduler &&
|
||||
$config.newCardInsertOrder ==
|
||||
DeckConfig.DeckConfig.Config.NewCardInsertOrder.NEW_CARD_INSERT_ORDER_RANDOM
|
||||
$config.newCardInsertOrder == DeckConfig_Config_NewCardInsertOrder.RANDOM
|
||||
? tr.deckConfigNewInsertionOrderRandomWithV3()
|
||||
: "";
|
||||
|
||||
|
|
|
@ -10,7 +10,6 @@ import "./deck-options-base.scss";
|
|||
|
||||
import { ModuleName, setupI18n } from "@tslib/i18n";
|
||||
import { checkNightMode } from "@tslib/nightmode";
|
||||
import { deckConfig, Decks } from "@tslib/proto";
|
||||
|
||||
import { modalsKey, touchDeviceKey } from "../components/context-keys";
|
||||
import DeckOptionsPage from "./DeckOptionsPage.svelte";
|
||||
|
@ -26,9 +25,10 @@ const i18n = setupI18n({
|
|||
],
|
||||
});
|
||||
|
||||
export async function setupDeckOptions(did: number): Promise<DeckOptionsPage> {
|
||||
export async function setupDeckOptions(did_: number): Promise<DeckOptionsPage> {
|
||||
const did = BigInt(did_);
|
||||
const [info] = await Promise.all([
|
||||
deckConfig.getDeckConfigsForUpdate(Decks.DeckId.create({ did })),
|
||||
getDeckConfigsForUpdate({ did }),
|
||||
i18n,
|
||||
]);
|
||||
|
||||
|
@ -38,7 +38,7 @@ export async function setupDeckOptions(did: number): Promise<DeckOptionsPage> {
|
|||
context.set(modalsKey, new Map());
|
||||
context.set(touchDeviceKey, "ontouchstart" in document.documentElement);
|
||||
|
||||
const state = new DeckOptionsState(did, info);
|
||||
const state = new DeckOptionsState(BigInt(did), info);
|
||||
return new DeckOptionsPage({
|
||||
target: document.body,
|
||||
props: { state },
|
||||
|
@ -46,6 +46,8 @@ export async function setupDeckOptions(did: number): Promise<DeckOptionsPage> {
|
|||
});
|
||||
}
|
||||
|
||||
import { getDeckConfigsForUpdate } from "@tslib/anki/deck_config_service";
|
||||
|
||||
import TitledContainer from "../components/TitledContainer.svelte";
|
||||
import EnumSelectorRow from "./EnumSelectorRow.svelte";
|
||||
import SpinBoxFloatRow from "./SpinBoxFloatRow.svelte";
|
||||
|
|
|
@ -5,7 +5,8 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import { DeckConfig } from "@tslib/proto";
|
||||
import { protoBase64 } from "@bufbuild/protobuf";
|
||||
import { DeckConfig_Config_LeechAction, DeckConfigsForUpdate } from "@tslib/anki/deckconfig_pb";
|
||||
import { get } from "svelte/store";
|
||||
|
||||
import { DeckOptionsState } from "./lib";
|
||||
|
@ -14,9 +15,9 @@ const exampleData = {
|
|||
allConfig: [
|
||||
{
|
||||
config: {
|
||||
id: "1",
|
||||
id: 1n,
|
||||
name: "Default",
|
||||
mtimeSecs: "1618570764",
|
||||
mtimeSecs: 1618570764n,
|
||||
usn: -1,
|
||||
config: {
|
||||
learnSteps: [1, 10],
|
||||
|
@ -31,19 +32,21 @@ const exampleData = {
|
|||
minimumLapseInterval: 1,
|
||||
graduatingIntervalGood: 1,
|
||||
graduatingIntervalEasy: 4,
|
||||
leechAction: "LEECH_ACTION_TAG_ONLY",
|
||||
leechAction: DeckConfig_Config_LeechAction.TAG_ONLY,
|
||||
leechThreshold: 8,
|
||||
capAnswerTimeToSecs: 60,
|
||||
other: "eyJuZXciOnsic2VwYXJhdGUiOnRydWV9LCJyZXYiOnsiZnV6eiI6MC4wNSwibWluU3BhY2UiOjF9fQ==",
|
||||
other: protoBase64.dec(
|
||||
"eyJuZXciOnsic2VwYXJhdGUiOnRydWV9LCJyZXYiOnsiZnV6eiI6MC4wNSwibWluU3BhY2UiOjF9fQ==",
|
||||
),
|
||||
},
|
||||
},
|
||||
useCount: 1,
|
||||
},
|
||||
{
|
||||
config: {
|
||||
id: "1618570764780",
|
||||
id: 1618570764780n,
|
||||
name: "another one",
|
||||
mtimeSecs: "1618570781",
|
||||
mtimeSecs: 1618570781n,
|
||||
usn: -1,
|
||||
config: {
|
||||
learnSteps: [1, 10, 20, 30],
|
||||
|
@ -58,7 +61,7 @@ const exampleData = {
|
|||
minimumLapseInterval: 1,
|
||||
graduatingIntervalGood: 1,
|
||||
graduatingIntervalEasy: 4,
|
||||
leechAction: "LEECH_ACTION_TAG_ONLY",
|
||||
leechAction: DeckConfig_Config_LeechAction.TAG_ONLY,
|
||||
leechThreshold: 8,
|
||||
capAnswerTimeToSecs: 60,
|
||||
},
|
||||
|
@ -68,8 +71,8 @@ const exampleData = {
|
|||
],
|
||||
currentDeck: {
|
||||
name: "Default::child",
|
||||
configId: "1618570764780",
|
||||
parentConfigIds: [1],
|
||||
configId: 1618570764780n,
|
||||
parentConfigIds: [1n],
|
||||
},
|
||||
defaults: {
|
||||
config: {
|
||||
|
@ -85,7 +88,7 @@ const exampleData = {
|
|||
minimumLapseInterval: 1,
|
||||
graduatingIntervalGood: 1,
|
||||
graduatingIntervalEasy: 4,
|
||||
leechAction: "LEECH_ACTION_TAG_ONLY",
|
||||
leechAction: DeckConfig_Config_LeechAction.TAG_ONLY,
|
||||
leechThreshold: 8,
|
||||
capAnswerTimeToSecs: 60,
|
||||
},
|
||||
|
@ -94,8 +97,8 @@ const exampleData = {
|
|||
|
||||
function startingState(): DeckOptionsState {
|
||||
return new DeckOptionsState(
|
||||
123,
|
||||
DeckConfig.DeckConfigsForUpdate.fromObject(exampleData),
|
||||
123n,
|
||||
new DeckConfigsForUpdate(exampleData),
|
||||
);
|
||||
}
|
||||
|
||||
|
@ -202,7 +205,7 @@ test("deck list", () => {
|
|||
|
||||
// only the pre-existing deck should be listed for removal
|
||||
const out = state.dataForSaving(false);
|
||||
expect(out.removedConfigIds).toStrictEqual([1618570764780]);
|
||||
expect(out.removedConfigIds).toStrictEqual([1618570764780n]);
|
||||
});
|
||||
|
||||
test("duplicate name", () => {
|
||||
|
@ -242,7 +245,7 @@ test("saving", () => {
|
|||
let state = startingState();
|
||||
let out = state.dataForSaving(false);
|
||||
expect(out.removedConfigIds).toStrictEqual([]);
|
||||
expect(out.targetDeckId).toBe(123);
|
||||
expect(out.targetDeckId).toBe(123n);
|
||||
// in no-changes case, currently selected config should
|
||||
// be returned
|
||||
expect(out.configs!.length).toBe(1);
|
||||
|
@ -275,7 +278,7 @@ test("saving", () => {
|
|||
// should be listed in removedConfigs, and modified should
|
||||
// only contain Default, which is the new current deck
|
||||
out = state.dataForSaving(true);
|
||||
expect(out.removedConfigIds).toStrictEqual([1618570764780]);
|
||||
expect(out.removedConfigIds).toStrictEqual([1618570764780n]);
|
||||
expect(out.configs!.map((c) => c.name)).toStrictEqual(["Default"]);
|
||||
});
|
||||
|
||||
|
|
|
@ -1,18 +1,25 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { PlainMessage } from "@bufbuild/protobuf";
|
||||
import { updateDeckConfigs } from "@tslib/anki/deck_config_service";
|
||||
import type {
|
||||
DeckConfigsForUpdate,
|
||||
DeckConfigsForUpdate_CurrentDeck,
|
||||
UpdateDeckConfigsRequest,
|
||||
} from "@tslib/anki/deckconfig_pb";
|
||||
import { DeckConfig, DeckConfig_Config, DeckConfigsForUpdate_CurrentDeck_Limits } from "@tslib/anki/deckconfig_pb";
|
||||
import { localeCompare } from "@tslib/i18n";
|
||||
import { DeckConfig, deckConfig } from "@tslib/proto";
|
||||
import { cloneDeep, isEqual } from "lodash-es";
|
||||
import type { Readable, Writable } from "svelte/store";
|
||||
import { get, readable, writable } from "svelte/store";
|
||||
|
||||
import type { DynamicSvelteComponent } from "../sveltelib/dynamicComponent";
|
||||
|
||||
export type DeckOptionsId = number;
|
||||
export type DeckOptionsId = bigint;
|
||||
|
||||
export interface ConfigWithCount {
|
||||
config: DeckConfig.DeckConfig;
|
||||
config: DeckConfig;
|
||||
useCount: number;
|
||||
}
|
||||
|
||||
|
@ -30,19 +37,19 @@ export interface ConfigListEntry {
|
|||
}
|
||||
|
||||
export class DeckOptionsState {
|
||||
readonly currentConfig: Writable<DeckConfig.DeckConfig.Config>;
|
||||
readonly currentConfig: Writable<DeckConfig_Config>;
|
||||
readonly currentAuxData: Writable<Record<string, unknown>>;
|
||||
readonly configList: Readable<ConfigListEntry[]>;
|
||||
readonly parentLimits: Readable<ParentLimits>;
|
||||
readonly cardStateCustomizer: Writable<string>;
|
||||
readonly currentDeck: DeckConfig.DeckConfigsForUpdate.CurrentDeck;
|
||||
readonly deckLimits: Writable<DeckConfig.DeckConfigsForUpdate.CurrentDeck.Limits>;
|
||||
readonly defaults: DeckConfig.DeckConfig.Config;
|
||||
readonly currentDeck: DeckConfigsForUpdate_CurrentDeck;
|
||||
readonly deckLimits: Writable<DeckConfigsForUpdate_CurrentDeck_Limits>;
|
||||
readonly defaults: DeckConfig_Config;
|
||||
readonly addonComponents: Writable<DynamicSvelteComponent[]>;
|
||||
readonly v3Scheduler: boolean;
|
||||
readonly newCardsIgnoreReviewLimit: Writable<boolean>;
|
||||
|
||||
private targetDeckId: number;
|
||||
private targetDeckId: DeckOptionsId;
|
||||
private configs: ConfigWithCount[];
|
||||
private selectedIdx: number;
|
||||
private configListSetter!: (val: ConfigListEntry[]) => void;
|
||||
|
@ -51,7 +58,7 @@ export class DeckOptionsState {
|
|||
private removedConfigs: DeckOptionsId[] = [];
|
||||
private schemaModified: boolean;
|
||||
|
||||
constructor(targetDeckId: number, data: DeckConfig.DeckConfigsForUpdate) {
|
||||
constructor(targetDeckId: DeckOptionsId, data: DeckConfigsForUpdate) {
|
||||
this.targetDeckId = targetDeckId;
|
||||
this.currentDeck = data.currentDeck!;
|
||||
this.defaults = data.defaults!.config!;
|
||||
|
@ -135,12 +142,12 @@ export class DeckOptionsState {
|
|||
}
|
||||
|
||||
/** Clone the current config, making it current. */
|
||||
private addConfigFrom(name: string, source: DeckConfig.DeckConfig.IConfig): void {
|
||||
private addConfigFrom(name: string, source: DeckConfig_Config): void {
|
||||
const uniqueName = this.ensureNewNameUnique(name);
|
||||
const config = DeckConfig.DeckConfig.create({
|
||||
id: 0,
|
||||
const config = new DeckConfig({
|
||||
id: 0n,
|
||||
name: uniqueName,
|
||||
config: DeckConfig.DeckConfig.Config.create(cloneDeep(source)),
|
||||
config: new DeckConfig_Config(cloneDeep(source)),
|
||||
});
|
||||
const configWithCount = { config, useCount: 0 };
|
||||
this.configs.push(configWithCount);
|
||||
|
@ -151,20 +158,20 @@ export class DeckOptionsState {
|
|||
}
|
||||
|
||||
removalWilLForceFullSync(): boolean {
|
||||
return !this.schemaModified && this.configs[this.selectedIdx].config.id !== 0;
|
||||
return !this.schemaModified && this.configs[this.selectedIdx].config.id !== 0n;
|
||||
}
|
||||
|
||||
defaultConfigSelected(): boolean {
|
||||
return this.configs[this.selectedIdx].config.id === 1;
|
||||
return this.configs[this.selectedIdx].config.id === 1n;
|
||||
}
|
||||
|
||||
/** Will throw if the default deck is selected. */
|
||||
removeCurrentConfig(): void {
|
||||
const currentId = this.configs[this.selectedIdx].config.id;
|
||||
if (currentId === 1) {
|
||||
if (currentId === 1n) {
|
||||
throw Error("can't remove default config");
|
||||
}
|
||||
if (currentId !== 0) {
|
||||
if (currentId !== 0n) {
|
||||
this.removedConfigs.push(currentId);
|
||||
this.schemaModified = true;
|
||||
}
|
||||
|
@ -176,13 +183,13 @@ export class DeckOptionsState {
|
|||
|
||||
dataForSaving(
|
||||
applyToChildren: boolean,
|
||||
): NonNullable<DeckConfig.IUpdateDeckConfigsRequest> {
|
||||
): PlainMessage<UpdateDeckConfigsRequest> {
|
||||
const modifiedConfigsExcludingCurrent = this.configs
|
||||
.map((c) => c.config)
|
||||
.filter((c, idx) => {
|
||||
return (
|
||||
idx !== this.selectedIdx
|
||||
&& (c.id === 0 || this.modifiedConfigs.has(c.id))
|
||||
&& (c.id === 0n || this.modifiedConfigs.has(c.id))
|
||||
);
|
||||
});
|
||||
const configs = [
|
||||
|
@ -202,14 +209,12 @@ export class DeckOptionsState {
|
|||
}
|
||||
|
||||
async save(applyToChildren: boolean): Promise<void> {
|
||||
await deckConfig.updateDeckConfigs(
|
||||
DeckConfig.UpdateDeckConfigsRequest.create(
|
||||
this.dataForSaving(applyToChildren),
|
||||
),
|
||||
await updateDeckConfigs(
|
||||
this.dataForSaving(applyToChildren),
|
||||
);
|
||||
}
|
||||
|
||||
private onCurrentConfigChanged(config: DeckConfig.DeckConfig.Config): void {
|
||||
private onCurrentConfigChanged(config: DeckConfig_Config): void {
|
||||
const configOuter = this.configs[this.selectedIdx].config;
|
||||
if (!isEqual(config, configOuter.config)) {
|
||||
configOuter.config = config;
|
||||
|
@ -251,7 +256,7 @@ export class DeckOptionsState {
|
|||
}
|
||||
|
||||
/** Returns a copy of the currently selected config. */
|
||||
private getCurrentConfig(): DeckConfig.DeckConfig.Config {
|
||||
private getCurrentConfig(): DeckConfig_Config {
|
||||
return cloneDeep(this.configs[this.selectedIdx].config.config!);
|
||||
}
|
||||
|
||||
|
@ -321,8 +326,8 @@ function bytesToObject(bytes: Uint8Array): Record<string, unknown> {
|
|||
return obj;
|
||||
}
|
||||
|
||||
export function createLimits(): DeckConfig.DeckConfigsForUpdate.CurrentDeck.Limits {
|
||||
return DeckConfig.DeckConfigsForUpdate.CurrentDeck.Limits.create({});
|
||||
export function createLimits(): DeckConfigsForUpdate_CurrentDeck_Limits {
|
||||
return new DeckConfigsForUpdate_CurrentDeck_Limits({});
|
||||
}
|
||||
|
||||
export class ValueTab {
|
||||
|
|
|
@ -3,14 +3,14 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import type { GraphData } from "./added";
|
||||
import { buildHistogram, gatherData } from "./added";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { GraphPrefs } from "./graph-helpers";
|
||||
import type { SearchEventMap, TableDatum } from "./graph-helpers";
|
||||
import { GraphRange, RevlogRange } from "./graph-helpers";
|
||||
import GraphRangeRadios from "./GraphRangeRadios.svelte";
|
||||
|
@ -19,13 +19,12 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import InputBox from "./InputBox.svelte";
|
||||
import TableData from "./TableData.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let prefs: GraphPrefs;
|
||||
|
||||
let histogramData = null as HistogramData | null;
|
||||
let tableData: TableDatum[] = [];
|
||||
let graphRange: GraphRange = GraphRange.Month;
|
||||
const { browserLinksSupported } = preferences;
|
||||
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
|
@ -39,7 +38,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
addedData,
|
||||
graphRange,
|
||||
dispatch,
|
||||
$browserLinksSupported,
|
||||
$prefs.browserLinksSupported,
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -3,8 +3,8 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
|
||||
import AxisTicks from "./AxisTicks.svelte";
|
||||
import { renderButtons } from "./buttons";
|
||||
|
@ -16,7 +16,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import InputBox from "./InputBox.svelte";
|
||||
import NoDataOverlay from "./NoDataOverlay.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let revlogRange: RevlogRange;
|
||||
|
||||
let graphRange: GraphRange = GraphRange.Year;
|
||||
|
|
|
@ -3,26 +3,24 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import AxisTicks from "./AxisTicks.svelte";
|
||||
import type { GraphData } from "./calendar";
|
||||
import { gatherData, renderCalendar } from "./calendar";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { SearchEventMap } from "./graph-helpers";
|
||||
import type { GraphPrefs, SearchEventMap } from "./graph-helpers";
|
||||
import { defaultGraphBounds, RevlogRange } from "./graph-helpers";
|
||||
import InputBox from "./InputBox.svelte";
|
||||
import NoDataOverlay from "./NoDataOverlay.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse;
|
||||
export let prefs: GraphPrefs;
|
||||
export let revlogRange: RevlogRange;
|
||||
export let nightMode: boolean;
|
||||
|
||||
const { calendarFirstDayOfWeek } = preferences;
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
let graphData: GraphData | null = null;
|
||||
|
@ -38,7 +36,17 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
let targetYear = maxYear;
|
||||
|
||||
$: if (sourceData) {
|
||||
graphData = gatherData(sourceData, $calendarFirstDayOfWeek);
|
||||
graphData = gatherData(sourceData, $prefs.calendarFirstDayOfWeek as number);
|
||||
renderCalendar(
|
||||
svg as SVGElement,
|
||||
bounds,
|
||||
graphData,
|
||||
dispatch,
|
||||
targetYear,
|
||||
nightMode,
|
||||
revlogRange,
|
||||
(day) => ($prefs.calendarFirstDayOfWeek = day),
|
||||
);
|
||||
}
|
||||
|
||||
$: {
|
||||
|
@ -54,19 +62,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
}
|
||||
|
||||
$: if (graphData) {
|
||||
renderCalendar(
|
||||
svg as SVGElement,
|
||||
bounds,
|
||||
graphData,
|
||||
dispatch,
|
||||
targetYear,
|
||||
nightMode,
|
||||
revlogRange,
|
||||
calendarFirstDayOfWeek.set,
|
||||
);
|
||||
}
|
||||
|
||||
const title = tr.statisticsCalendarTitle();
|
||||
</script>
|
||||
|
||||
|
|
|
@ -3,22 +3,21 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr2 from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import type { GraphData, TableDatum } from "./card-counts";
|
||||
import { gatherData, renderCards } from "./card-counts";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { GraphPrefs } from "./graph-helpers";
|
||||
import type { SearchEventMap } from "./graph-helpers";
|
||||
import { defaultGraphBounds } from "./graph-helpers";
|
||||
import InputBox from "./InputBox.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse;
|
||||
export let prefs: GraphPrefs;
|
||||
|
||||
const { cardCountsSeparateInactive, browserLinksSupported } = preferences;
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
let svg = null as HTMLElement | SVGElement | null;
|
||||
|
@ -31,7 +30,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
let tableData = null as unknown as TableDatum[];
|
||||
|
||||
$: {
|
||||
graphData = gatherData(sourceData, $cardCountsSeparateInactive);
|
||||
graphData = gatherData(sourceData, $prefs.cardCountsSeparateInactive);
|
||||
tableData = renderCards(svg as any, bounds, graphData);
|
||||
}
|
||||
|
||||
|
@ -42,7 +41,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
<Graph title={graphData.title}>
|
||||
<InputBox>
|
||||
<label>
|
||||
<input type="checkbox" bind:checked={$cardCountsSeparateInactive} />
|
||||
<input type="checkbox" bind:checked={$prefs.cardCountsSeparateInactive} />
|
||||
{label}
|
||||
</label>
|
||||
</InputBox>
|
||||
|
@ -64,7 +63,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
<!-- prettier-ignore -->
|
||||
<td>
|
||||
<span style="color: {d.colour};">■ </span>
|
||||
{#if browserLinksSupported}
|
||||
{#if $prefs.browserLinksSupported}
|
||||
<span class="search-link" on:click={() => dispatch('search', { query: d.query })}>{d.label}</span>
|
||||
{:else}
|
||||
<span>{d.label}</span>
|
||||
|
|
|
@ -3,32 +3,31 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import { gatherData, prepareData } from "./ease";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { GraphPrefs } from "./graph-helpers";
|
||||
import type { SearchEventMap, TableDatum } from "./graph-helpers";
|
||||
import type { HistogramData } from "./histogram-graph";
|
||||
import HistogramGraph from "./HistogramGraph.svelte";
|
||||
import TableData from "./TableData.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let prefs: GraphPrefs;
|
||||
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
let histogramData = null as HistogramData | null;
|
||||
let tableData: TableDatum[] = [];
|
||||
const { browserLinksSupported } = preferences;
|
||||
|
||||
$: if (sourceData) {
|
||||
[histogramData, tableData] = prepareData(
|
||||
gatherData(sourceData),
|
||||
dispatch,
|
||||
$browserLinksSupported,
|
||||
$prefs.browserLinksSupported,
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@ -3,14 +3,14 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import type { GraphData } from "./future-due";
|
||||
import { buildHistogram, gatherData } from "./future-due";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { GraphPrefs } from "./graph-helpers";
|
||||
import type { SearchEventMap, TableDatum } from "./graph-helpers";
|
||||
import { GraphRange, RevlogRange } from "./graph-helpers";
|
||||
import GraphRangeRadios from "./GraphRangeRadios.svelte";
|
||||
|
@ -19,8 +19,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import InputBox from "./InputBox.svelte";
|
||||
import TableData from "./TableData.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let prefs: GraphPrefs;
|
||||
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
|
@ -28,7 +28,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
let histogramData = null as HistogramData | null;
|
||||
let tableData: TableDatum[] = [] as any;
|
||||
let graphRange: GraphRange = GraphRange.Month;
|
||||
const { browserLinksSupported, futureDueShowBacklog } = preferences;
|
||||
|
||||
$: if (sourceData) {
|
||||
graphData = gatherData(sourceData);
|
||||
|
@ -38,9 +37,9 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
({ histogramData, tableData } = buildHistogram(
|
||||
graphData,
|
||||
graphRange,
|
||||
$futureDueShowBacklog,
|
||||
$prefs.futureDueShowBacklog,
|
||||
dispatch,
|
||||
$browserLinksSupported,
|
||||
$prefs.browserLinksSupported,
|
||||
));
|
||||
}
|
||||
|
||||
|
@ -53,7 +52,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
<InputBox>
|
||||
{#if graphData && graphData.haveBacklog}
|
||||
<label>
|
||||
<input type="checkbox" bind:checked={$futureDueShowBacklog} />
|
||||
<input type="checkbox" bind:checked={$prefs.futureDueShowBacklog} />
|
||||
{backlogLabel}
|
||||
</label>
|
||||
{/if}
|
||||
|
|
|
@ -17,6 +17,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
const days = writable(initialDays);
|
||||
|
||||
export let graphs: typeof SvelteComponentDev[];
|
||||
/** See RangeBox */
|
||||
export let controller: typeof SvelteComponentDev | null;
|
||||
|
||||
function browserSearch(event: CustomEvent) {
|
||||
|
@ -24,25 +25,18 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
}
|
||||
</script>
|
||||
|
||||
<WithGraphData
|
||||
{search}
|
||||
{days}
|
||||
let:loading
|
||||
let:sourceData
|
||||
let:preferences
|
||||
let:revlogRange
|
||||
>
|
||||
<WithGraphData {search} {days} let:sourceData let:loading let:prefs let:revlogRange>
|
||||
{#if controller}
|
||||
<svelte:component this={controller} {search} {days} {loading} />
|
||||
{/if}
|
||||
|
||||
<div class="graphs-container">
|
||||
{#if sourceData && preferences && revlogRange}
|
||||
{#if sourceData && revlogRange}
|
||||
{#each graphs as graph}
|
||||
<svelte:component
|
||||
this={graph}
|
||||
{sourceData}
|
||||
{preferences}
|
||||
{prefs}
|
||||
{revlogRange}
|
||||
nightMode={$pageTheme.isDark}
|
||||
on:search={browserSearch}
|
||||
|
|
|
@ -3,8 +3,8 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
|
||||
import AxisTicks from "./AxisTicks.svelte";
|
||||
import CumulativeOverlay from "./CumulativeOverlay.svelte";
|
||||
|
@ -17,7 +17,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import InputBox from "./InputBox.svelte";
|
||||
import NoDataOverlay from "./NoDataOverlay.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let revlogRange: RevlogRange;
|
||||
let graphRange: GraphRange = GraphRange.Year;
|
||||
|
||||
|
|
|
@ -3,13 +3,13 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { MONTH, timeSpan } from "@tslib/time";
|
||||
import { createEventDispatcher } from "svelte";
|
||||
|
||||
import type { PreferenceStore } from "../sveltelib/preferences";
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { GraphPrefs } from "./graph-helpers";
|
||||
import type { SearchEventMap, TableDatum } from "./graph-helpers";
|
||||
import type { HistogramData } from "./histogram-graph";
|
||||
import HistogramGraph from "./HistogramGraph.svelte";
|
||||
|
@ -22,8 +22,8 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
} from "./intervals";
|
||||
import TableData from "./TableData.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let preferences: PreferenceStore<Stats.GraphPreferences>;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let prefs: GraphPrefs;
|
||||
|
||||
const dispatch = createEventDispatcher<SearchEventMap>();
|
||||
|
||||
|
@ -31,7 +31,6 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
let histogramData = null as HistogramData | null;
|
||||
let tableData: TableDatum[] = [];
|
||||
let range = IntervalRange.Percentile95;
|
||||
const { browserLinksSupported } = preferences;
|
||||
|
||||
$: if (sourceData) {
|
||||
intervalData = gatherIntervalData(sourceData);
|
||||
|
@ -42,7 +41,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
intervalData,
|
||||
range,
|
||||
dispatch,
|
||||
$browserLinksSupported,
|
||||
$prefs.browserLinksSupported,
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
@@ -67,7 +67,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
</script>

<div class="range-box">
<div class="spin" class:loading>◐</div>
<div class="spin no-reduce-motion" class:loading>◐</div>

<InputBox>
<label>
@ -3,8 +3,8 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
|
||||
import AxisTicks from "./AxisTicks.svelte";
|
||||
import CumulativeOverlay from "./CumulativeOverlay.svelte";
|
||||
|
@ -19,7 +19,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import { gatherData, renderReviews } from "./reviews";
|
||||
import TableData from "./TableData.svelte";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
export let revlogRange: RevlogRange;
|
||||
|
||||
let graphData: GraphData | null = null;
|
||||
|
|
|
@ -3,13 +3,13 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
|
||||
import Graph from "./Graph.svelte";
|
||||
import type { TodayData } from "./today";
|
||||
import { gatherData } from "./today";
|
||||
|
||||
export let sourceData: Stats.GraphsResponse | null = null;
|
||||
export let sourceData: GraphsResponse | null = null;
|
||||
|
||||
let todayData: TodayData | null = null;
|
||||
$: if (sourceData) {
|
||||
|
|
|
@@ -3,63 +3,49 @@ Copyright: Ankitects Pty Ltd and contributors
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
-->
<script lang="ts">
import { empty, Stats, stats } from "@tslib/proto";
import type { GraphsResponse } from "@tslib/anki/stats_pb";
import {
getGraphPreferences,
graphs,
setGraphPreferences,
} from "@tslib/anki/stats_service";
import type { Writable } from "svelte/store";

import useAsync from "../sveltelib/async";
import useAsyncReactive from "../sveltelib/asyncReactive";
import type { PreferenceRaw } from "../sveltelib/preferences";
import { getPreferences } from "../sveltelib/preferences";
import { autoSavingPrefs } from "../sveltelib/preferences";
import { daysToRevlogRange } from "./graph-helpers";

export let search: Writable<string>;
export let days: Writable<number>;

const {
loading: graphLoading,
error: graphError,
value: graphValue,
} = useAsyncReactive(
() =>
stats.graphs(Stats.GraphsRequest.create({ search: $search, days: $days })),
[search, days],
const prefsPromise = autoSavingPrefs(
() => getGraphPreferences({}),
setGraphPreferences,
);

const {
loading: prefsLoading,
error: prefsError,
value: prefsValue,
} = useAsync(() =>
getPreferences(
() => stats.getGraphPreferences(empty),
async (input: Stats.IGraphPreferences): Promise<void> => {
stats.setGraphPreferences(Stats.GraphPreferences.create(input));
},
Stats.GraphPreferences.toObject.bind(Stats.GraphPreferences) as (
preferences: Stats.GraphPreferences,
options: { defaults: boolean },
) => PreferenceRaw<Stats.GraphPreferences>,
),
);
let sourceData = null as null | GraphsResponse;
let loading = true;
$: updateSourceData($search, $days);

async function updateSourceData(search: string, days: number): Promise<void> {
// ensure the fast-loading preferences come first
await prefsPromise;
loading = true;
try {
sourceData = await graphs({ search, days });
} finally {
loading = false;
}
}

$: revlogRange = daysToRevlogRange($days);

$: {
if ($graphError) {
alert($graphError);
}
}

$: {
if ($prefsError) {
alert($prefsError);
}
}
</script>

<slot
{revlogRange}
loading={$graphLoading || $prefsLoading}
sourceData={$graphValue}
preferences={$prefsValue}
/>
<!--
We block graphs loading until the preferences have been fetched, so graphs
don't have to worry about a null initial value. We don't do the same for the
graph data, as it gets updated as the user changes options, and we don't want
the current graphs to disappear until the new graphs have finished loading.
-->
{#await prefsPromise then prefs}
<slot {revlogRange} {prefs} {sourceData} {loading} />
{/await}
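For reference, a rough sketch of the shape the autoSavingPrefs() helper used above could take; the real helper lives in sveltelib/preferences and is not part of this hunk, so the generic signature and save-on-change behaviour below are assumptions:

import { writable, type Writable } from "svelte/store";

export async function autoSavingPrefs<T extends object>(
    fetch: () => Promise<T>,
    save: (prefs: T) => Promise<unknown>,
): Promise<Writable<T>> {
    // load once up-front, so consumers never have to handle a null value
    const store = writable(await fetch());
    let initialized = false;
    // persist every change made through the store (e.g. `$prefs.foo = bar`)
    store.subscribe((value) => {
        if (initialized) {
            save(value);
        } else {
            initialized = true;
        }
    });
    return store;
}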
@ -5,8 +5,8 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { dayLabel } from "@tslib/time";
|
||||
import type { Bin } from "d3";
|
||||
import { bin, interpolateBlues, min, scaleLinear, scaleSequential, sum } from "d3";
|
||||
|
@ -19,7 +19,7 @@ export interface GraphData {
|
|||
daysAdded: Map<number, number>;
|
||||
}
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse): GraphData {
|
||||
export function gatherData(data: GraphsResponse): GraphData {
|
||||
return { daysAdded: numericMap(data.added!.added) };
|
||||
}
|
||||
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import {
|
||||
axisBottom,
|
||||
axisLeft,
|
||||
|
@ -34,7 +34,7 @@ export interface GraphData {
|
|||
mature: ButtonCounts;
|
||||
}
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse, range: GraphRange): GraphData {
|
||||
export function gatherData(data: GraphsResponse, range: GraphRange): GraphData {
|
||||
const buttons = data.buttons!;
|
||||
switch (range) {
|
||||
case GraphRange.Month:
|
||||
|
@ -65,7 +65,7 @@ interface TotalCorrect {
|
|||
export function renderButtons(
|
||||
svgElem: SVGElement,
|
||||
bounds: GraphBounds,
|
||||
origData: Stats.GraphsResponse,
|
||||
origData: GraphsResponse,
|
||||
range: GraphRange,
|
||||
): void {
|
||||
const sourceData = gatherData(origData, range);
|
||||
|
|
|
@ -1,9 +1,10 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import { GraphPreferences_Weekday as Weekday } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedDate, weekdayLabel } from "@tslib/i18n";
|
||||
import { Stats } from "@tslib/proto";
|
||||
import type { CountableTimeInterval } from "d3";
|
||||
import { timeHour } from "d3";
|
||||
import {
|
||||
|
@ -43,12 +44,9 @@ interface DayDatum {
|
|||
date: Date;
|
||||
}
|
||||
|
||||
type WeekdayType = Stats.GraphPreferences.Weekday;
|
||||
const Weekday = Stats.GraphPreferences.Weekday; /* enum */
|
||||
|
||||
export function gatherData(
|
||||
data: Stats.GraphsResponse,
|
||||
firstDayOfWeek: WeekdayType,
|
||||
data: GraphsResponse,
|
||||
firstDayOfWeek: Weekday,
|
||||
): GraphData {
|
||||
const reviewCount = new Map(
|
||||
Object.entries(data.reviews!.count).map(([k, v]) => {
|
||||
|
@ -205,7 +203,7 @@ export function renderCalendar(
|
|||
.attr("fill", (d: DayDatum) => (d.count === 0 ? emptyColour : blues(d.count)!));
|
||||
}
|
||||
|
||||
function timeFunctionForDay(firstDayOfWeek: WeekdayType): CountableTimeInterval {
|
||||
function timeFunctionForDay(firstDayOfWeek: Weekday): CountableTimeInterval {
|
||||
switch (firstDayOfWeek) {
|
||||
case Weekday.MONDAY:
|
||||
return timeMonday;
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import {
|
||||
arc,
|
||||
cumsum,
|
||||
|
@ -41,7 +41,7 @@ const barColours = [
|
|||
"grey", /* buried */
|
||||
];
|
||||
|
||||
function countCards(data: Stats.GraphsResponse, separateInactive: boolean): Count[] {
|
||||
function countCards(data: GraphsResponse, separateInactive: boolean): Count[] {
|
||||
const countData = separateInactive ? data.cardCounts!.excludingInactive! : data.cardCounts!.includingInactive!;
|
||||
|
||||
const extraQuery = separateInactive ? "AND -(\"is:buried\" OR \"is:suspended\")" : "";
|
||||
|
@ -85,7 +85,7 @@ function countCards(data: Stats.GraphsResponse, separateInactive: boolean): Coun
|
|||
}
|
||||
|
||||
export function gatherData(
|
||||
data: Stats.GraphsResponse,
|
||||
data: GraphsResponse,
|
||||
separateInactive: boolean,
|
||||
): GraphData {
|
||||
const counts = countCards(data, separateInactive);
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import type { Bin, ScaleLinear } from "d3";
|
||||
import { bin, extent, interpolateRdYlGn, scaleLinear, scaleSequential, sum } from "d3";
|
||||
|
||||
|
@ -19,7 +19,7 @@ export interface GraphData {
|
|||
eases: Map<number, number>;
|
||||
}
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse): GraphData {
|
||||
export function gatherData(data: GraphsResponse): GraphData {
|
||||
return { eases: numericMap(data.eases!.eases) };
|
||||
}
|
||||
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { dayLabel } from "@tslib/time";
|
||||
import type { Bin } from "d3";
|
||||
import { bin, extent, interpolateGreens, scaleLinear, scaleSequential, sum } from "d3";
|
||||
|
@ -21,7 +21,7 @@ export interface GraphData {
|
|||
haveBacklog: boolean;
|
||||
}
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse): GraphData {
|
||||
export function gatherData(data: GraphsResponse): GraphData {
|
||||
const msg = data.futureDue!;
|
||||
return { dueCounts: numericMap(msg.futureDue), haveBacklog: msg.haveBacklog };
|
||||
}
|
||||
|
|
|
@@ -5,9 +5,10 @@
@typescript-eslint/no-explicit-any: "off",
@typescript-eslint/ban-ts-comment: "off" */

import type { Cards, Stats } from "@tslib/proto";
import type { GraphPreferences } from "@tslib/anki/stats_pb";
import type { Bin, Selection } from "d3";
import { sum } from "d3";
import type { PreferenceStore } from "sveltelib/preferences";

// amount of data to fetch from backend
export enum RevlogRange {

@@ -27,13 +28,6 @@ export enum GraphRange {
AllTime = 3,
}

export interface GraphsContext {
cards: Cards.Card[];
revlog: Stats.RevlogEntry[];
revlogRange: RevlogRange;
nightMode: boolean;
}

export interface GraphBounds {
width: number;
height: number;

@@ -54,6 +48,8 @@ export function defaultGraphBounds(): GraphBounds {
};
}

export type GraphPrefs = PreferenceStore<GraphPreferences>;

export function setDataAvailable(
svg: Selection<SVGElement, any, any, any>,
available: boolean,
@ -5,9 +5,10 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import type { GraphsResponse_Hours_Hour } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import {
|
||||
area,
|
||||
axisBottom,
|
||||
|
@ -33,8 +34,8 @@ interface Hour {
|
|||
correctCount: number;
|
||||
}
|
||||
|
||||
function gatherData(data: Stats.GraphsResponse, range: GraphRange): Hour[] {
|
||||
function convert(hours: Stats.GraphsResponse.Hours.IHour[]): Hour[] {
|
||||
function gatherData(data: GraphsResponse, range: GraphRange): Hour[] {
|
||||
function convert(hours: GraphsResponse_Hours_Hour[]): Hour[] {
|
||||
return hours.map((e, idx) => {
|
||||
return { hour: idx, totalCount: e.total!, correctCount: e.correct! };
|
||||
});
|
||||
|
@ -54,7 +55,7 @@ function gatherData(data: Stats.GraphsResponse, range: GraphRange): Hour[] {
|
|||
export function renderHours(
|
||||
svgElem: SVGElement,
|
||||
bounds: GraphBounds,
|
||||
origData: Stats.GraphsResponse,
|
||||
origData: GraphsResponse,
|
||||
range: GraphRange,
|
||||
): void {
|
||||
const data = gatherData(origData, range);
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { timeSpan } from "@tslib/time";
|
||||
import type { Bin } from "d3";
|
||||
import { bin, extent, interpolateBlues, mean, quantile, scaleLinear, scaleSequential, sum } from "d3";
|
||||
|
@ -27,7 +27,7 @@ export enum IntervalRange {
|
|||
All = 3,
|
||||
}
|
||||
|
||||
export function gatherIntervalData(data: Stats.GraphsResponse): IntervalGraphData {
|
||||
export function gatherIntervalData(data: GraphsResponse): IntervalGraphData {
|
||||
// This could be made more efficient - this graph currently expects a flat list of individual intervals which it
|
||||
// uses to calculate a percentile and then converts into a histogram, and the percentile/histogram calculations
|
||||
// in JS are relatively slow.
|
||||
|
|
|
@ -5,9 +5,9 @@
|
|||
@typescript-eslint/no-explicit-any: "off",
|
||||
*/
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { dayLabel, timeSpan } from "@tslib/time";
|
||||
import type { Bin, ScaleSequential } from "d3";
|
||||
import {
|
||||
|
@ -51,7 +51,7 @@ export interface GraphData {
|
|||
|
||||
type BinType = Bin<Map<number, Reviews[]>, number>;
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse): GraphData {
|
||||
export function gatherData(data: GraphsResponse): GraphData {
|
||||
return { reviewCount: numericMap(data.reviews!.count), reviewTime: numericMap(data.reviews!.time) };
|
||||
}
|
||||
|
||||
|
|
|
@ -1,9 +1,9 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { GraphsResponse } from "@tslib/anki/stats_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { localizedNumber } from "@tslib/i18n";
|
||||
import type { Stats } from "@tslib/proto";
|
||||
import { studiedToday } from "@tslib/time";
|
||||
|
||||
export interface TodayData {
|
||||
|
@ -11,7 +11,7 @@ export interface TodayData {
|
|||
lines: string[];
|
||||
}
|
||||
|
||||
export function gatherData(data: Stats.GraphsResponse): TodayData {
|
||||
export function gatherData(data: GraphsResponse): TodayData {
|
||||
let lines: string[];
|
||||
const today = data.today!;
|
||||
if (today.answerCount) {
|
||||
|
|
|
@@ -1,12 +1,12 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import type { OpChanges } from "@tslib/anki/collection_pb";
import { addImageOcclusionNote, updateImageOcclusionNote } from "@tslib/anki/image_occlusion_service";
import * as tr from "@tslib/ftl";
import { get } from "svelte/store";

import type { Collection } from "../lib/proto";
import type { IOMode } from "./lib";
import { addImageOcclusionNote, updateImageOcclusionNote } from "./lib";
import { exportShapesToClozeDeletions } from "./shapes/to-cloze";
import { notesDataStore, tagsWritable } from "./store";
import Toast from "./Toast.svelte";

@@ -29,29 +29,29 @@ export const addOrUpdateNote = async function(
backExtra = header ? `<div>${backExtra}</div>` : "";

if (mode.kind == "edit") {
const result = await updateImageOcclusionNote(
mode.noteId,
occlusionCloze,
const result = await updateImageOcclusionNote({
noteId: BigInt(mode.noteId),
occlusions: occlusionCloze,
header,
backExtra,
tags,
);
});
showResult(mode.noteId, result, noteCount);
} else {
const result = await addImageOcclusionNote(
mode.notetypeId,
mode.imagePath,
occlusionCloze,
const result = await addImageOcclusionNote({
notetypeId: BigInt(mode.notetypeId),
imagePath: mode.imagePath,
occlusions: occlusionCloze,
header,
backExtra,
tags,
);
});
showResult(null, result, noteCount);
}
};

// show toast message
const showResult = (noteId: number | null, result: Collection.OpChanges, count: number) => {
const showResult = (noteId: number | null, result: OpChanges, count: number) => {
const toastComponent = new Toast({
target: document.body,
props: {
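The generated service methods take 64-bit ids as bigint, which is why the page-supplied numbers above are wrapped with BigInt(). A small illustration of converting in both directions (the literal id is made up):

const noteIdFromPage = 1693543976123; // a typical id, well under MAX_SAFE_INTEGER
const noteIdForProto: bigint = BigInt(noteIdFromPage); // what the service call expects
// converting back is only safe while ids stay within Number.MAX_SAFE_INTEGER
const roundTripped = Number(noteIdForProto);
console.assert(roundTripped === noteIdFromPage);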
@ -1,9 +1,6 @@
|
|||
// Copyright: Ankitects Pty Ltd and contributors
|
||||
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
|
||||
import type { Collection } from "../lib/proto";
|
||||
import { ImageOcclusion, imageOcclusion } from "../lib/proto";
|
||||
|
||||
export interface IOAddingMode {
|
||||
kind: "add";
|
||||
notetypeId: number;
|
||||
|
@ -16,61 +13,3 @@ export interface IOEditingMode {
|
|||
}
|
||||
|
||||
export type IOMode = IOAddingMode | IOEditingMode;
|
||||
|
||||
export async function getImageForOcclusion(
|
||||
path: string,
|
||||
): Promise<ImageOcclusion.GetImageForOcclusionResponse> {
|
||||
return imageOcclusion.getImageForOcclusion(
|
||||
ImageOcclusion.GetImageForOcclusionRequest.create({
|
||||
path,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export async function addImageOcclusionNote(
|
||||
notetypeId: number,
|
||||
imagePath: string,
|
||||
occlusions: string,
|
||||
header: string,
|
||||
backExtra: string,
|
||||
tags: string[],
|
||||
): Promise<Collection.OpChanges> {
|
||||
return imageOcclusion.addImageOcclusionNote(
|
||||
ImageOcclusion.AddImageOcclusionNoteRequest.create({
|
||||
notetypeId,
|
||||
imagePath,
|
||||
occlusions,
|
||||
header,
|
||||
backExtra,
|
||||
tags,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export async function getImageOcclusionNote(
|
||||
noteId: number,
|
||||
): Promise<ImageOcclusion.GetImageOcclusionNoteResponse> {
|
||||
return imageOcclusion.getImageOcclusionNote(
|
||||
ImageOcclusion.GetImageOcclusionNoteRequest.create({
|
||||
noteId,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
export async function updateImageOcclusionNote(
|
||||
noteId: number,
|
||||
occlusions: string,
|
||||
header: string,
|
||||
backExtra: string,
|
||||
tags: string[],
|
||||
): Promise<Collection.OpChanges> {
|
||||
return imageOcclusion.updateImageOcclusionNote(
|
||||
ImageOcclusion.UpdateImageOcclusionNoteRequest.create({
|
||||
noteId,
|
||||
occlusions,
|
||||
header,
|
||||
backExtra,
|
||||
tags,
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
|
|
@@ -1,15 +1,14 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import { protoBase64 } from "@bufbuild/protobuf";
import { getImageForOcclusion, getImageOcclusionNote } from "@tslib/anki/image_occlusion_service";
import * as tr from "@tslib/ftl";
import type { ImageOcclusion } from "@tslib/proto";
import { fabric } from "fabric";
import type { PanZoom } from "panzoom";
import protobuf from "protobufjs";
import { get } from "svelte/store";

import { optimumCssSizeForCanvas } from "./canvas-scale";
import { getImageForOcclusion, getImageOcclusionNote } from "./lib";
import { notesDataStore, tagsWritable, zoomResetValue } from "./store";
import Toast from "./Toast.svelte";
import { addShapesToCanvasFromCloze } from "./tools/add-from-cloze";

@@ -18,7 +17,7 @@ import { undoRedoInit } from "./tools/tool-undo-redo";
import type { Size } from "./types";

export const setupMaskEditor = async (path: string, instance: PanZoom): Promise<fabric.Canvas> => {
const imageData = await getImageForOcclusion(path!);
const imageData = await getImageForOcclusion({ path });
const canvas = initCanvas();

// get image width and height

@@ -37,8 +36,9 @@ export const setupMaskEditor = async (path: string, instance: PanZoom): Promise<
};

export const setupMaskEditorForEdit = async (noteId: number, instance: PanZoom): Promise<fabric.Canvas> => {
const clozeNoteResponse: ImageOcclusion.GetImageOcclusionNoteResponse = await getImageOcclusionNote(noteId);
if (clozeNoteResponse.error) {
const clozeNoteResponse = await getImageOcclusionNote({ noteId: BigInt(noteId) });
const kind = clozeNoteResponse.value?.case;
if (!kind || kind === "error") {
new Toast({
target: document.body,
props: {

@@ -49,7 +49,7 @@ export const setupMaskEditorForEdit = async (noteId: number, instance: PanZoom):
return;
}

const clozeNote = clozeNoteResponse.note!;
const clozeNote = clozeNoteResponse.value.value;
const canvas = initCanvas();

// get image width and height

@@ -84,11 +84,7 @@ const initCanvas = (): fabric.Canvas => {
};

const getImageData = (imageData): string => {
const b64encoded = protobuf.util.base64.encode(
imageData,
0,
imageData.length,
);
const b64encoded = protoBase64.enc(imageData);
return "data:image/png;base64," + b64encoded;
};
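The value?.case check above is how protobuf-es exposes the response's oneof: a tagged union discriminated on .case. A simplified sketch of that shape; the case names and payload types here are illustrative, not the generated ones:

type NoteOrError =
    | { case: "note"; value: unknown }
    | { case: "error"; value: string }
    | { case: undefined; value?: undefined };

function describe(oneof: NoteOrError): string {
    switch (oneof.case) {
        case "note":
            return "got a note";
        case "error":
            return `backend reported: ${oneof.value}`;
        default:
            return "oneof was not set";
    }
}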
|
@ -3,23 +3,23 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import { CsvMetadata_MatchScope } from "@tslib/anki/import_export_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { ImportExport } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Row from "../components/Row.svelte";
|
||||
import Select from "../components/Select.svelte";
|
||||
import SelectOption from "../components/SelectOption.svelte";
|
||||
|
||||
export let matchScope: ImportExport.CsvMetadata.MatchScope;
|
||||
export let matchScope: CsvMetadata_MatchScope;
|
||||
|
||||
const matchScopes = [
|
||||
{
|
||||
value: ImportExport.CsvMetadata.MatchScope.NOTETYPE,
|
||||
value: CsvMetadata_MatchScope.NOTETYPE,
|
||||
label: tr.notetypesNotetype(),
|
||||
},
|
||||
{
|
||||
value: ImportExport.CsvMetadata.MatchScope.NOTETYPE_AND_DECK,
|
||||
value: CsvMetadata_MatchScope.NOTETYPE_AND_DECK,
|
||||
label: tr.importingNotetypeAndDeck(),
|
||||
},
|
||||
];
|
||||
|
|
|
@ -3,16 +3,16 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { DeckNameId } from "@tslib/anki/decks_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Decks } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Row from "../components/Row.svelte";
|
||||
import Select from "../components/Select.svelte";
|
||||
import SelectOption from "../components/SelectOption.svelte";
|
||||
|
||||
export let deckNameIds: Decks.DeckNameId[];
|
||||
export let deckId: number;
|
||||
export let deckNameIds: DeckNameId[];
|
||||
export let deckId: bigint;
|
||||
|
||||
$: label = deckNameIds.find((d) => d.id === deckId)?.name.replace(/^.+::/, "...");
|
||||
</script>
|
||||
|
|
|
@ -3,18 +3,17 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import { CsvMetadata_Delimiter as Delimiter } from "@tslib/anki/import_export_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { ImportExport } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Row from "../components/Row.svelte";
|
||||
import Select from "../components/Select.svelte";
|
||||
import SelectOption from "../components/SelectOption.svelte";
|
||||
|
||||
export let delimiter: ImportExport.CsvMetadata.Delimiter;
|
||||
export let delimiter: Delimiter;
|
||||
export let disabled: boolean;
|
||||
|
||||
const Delimiter = ImportExport.CsvMetadata.Delimiter;
|
||||
const delimiters = [
|
||||
{ value: Delimiter.TAB, label: tr.importingTab() },
|
||||
{ value: Delimiter.PIPE, label: tr.importingPipe() },
|
||||
|
|
|
@ -3,27 +3,27 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import { CsvMetadata_DupeResolution as DupeResolution } from "@tslib/anki/import_export_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import { ImportExport } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Row from "../components/Row.svelte";
|
||||
import Select from "../components/Select.svelte";
|
||||
import SelectOption from "../components/SelectOption.svelte";
|
||||
|
||||
export let dupeResolution: ImportExport.CsvMetadata.DupeResolution;
|
||||
export let dupeResolution: DupeResolution;
|
||||
|
||||
const dupeResolutions = [
|
||||
{
|
||||
value: ImportExport.CsvMetadata.DupeResolution.UPDATE,
|
||||
value: DupeResolution.UPDATE,
|
||||
label: tr.importingUpdate(),
|
||||
},
|
||||
{
|
||||
value: ImportExport.CsvMetadata.DupeResolution.DUPLICATE,
|
||||
value: DupeResolution.DUPLICATE,
|
||||
label: tr.importingDuplicate(),
|
||||
},
|
||||
{
|
||||
value: ImportExport.CsvMetadata.DupeResolution.PRESERVE,
|
||||
value: DupeResolution.PRESERVE,
|
||||
label: tr.importingPreserve(),
|
||||
},
|
||||
];
|
||||
|
|
|
@ -3,19 +3,19 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { CsvMetadata_MappedNotetype } from "@tslib/anki/import_export_pb";
|
||||
import { getFieldNames } from "@tslib/anki/notetypes_service";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { ImportExport } from "@tslib/proto";
|
||||
|
||||
import Spacer from "../components/Spacer.svelte";
|
||||
import type { ColumnOption } from "./lib";
|
||||
import { getNotetypeFields } from "./lib";
|
||||
import MapperRow from "./MapperRow.svelte";
|
||||
|
||||
export let columnOptions: ColumnOption[];
|
||||
export let tagsColumn: number;
|
||||
export let globalNotetype: ImportExport.CsvMetadata.MappedNotetype | null;
|
||||
export let globalNotetype: CsvMetadata_MappedNotetype | null;
|
||||
|
||||
let lastNotetypeId: number | undefined = -1;
|
||||
let lastNotetypeId: bigint | undefined = -1n;
|
||||
let fieldNamesPromise: Promise<string[]>;
|
||||
|
||||
$: if (globalNotetype?.id !== lastNotetypeId) {
|
||||
|
@ -23,7 +23,7 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
fieldNamesPromise =
|
||||
globalNotetype === null
|
||||
? Promise.resolve([])
|
||||
: getNotetypeFields(globalNotetype.id);
|
||||
: getFieldNames({ ntid: globalNotetype.id }).then((list) => list.vals);
|
||||
}
|
||||
</script>
|
||||
|
||||
|
|
|
@ -3,9 +3,17 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { DeckNameId } from "@tslib/anki/decks_pb";
|
||||
import type { StringList } from "@tslib/anki/generic_pb";
|
||||
import type {
|
||||
CsvMetadata_Delimiter,
|
||||
CsvMetadata_DupeResolution,
|
||||
CsvMetadata_MappedNotetype,
|
||||
CsvMetadata_MatchScope,
|
||||
} from "@tslib/anki/import_export_pb";
|
||||
import { getCsvMetadata, importCsv } from "@tslib/anki/import_export_service";
|
||||
import type { NotetypeNameId } from "@tslib/anki/notetypes_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Decks, Generic, Notetypes } from "@tslib/proto";
|
||||
import { ImportExport, importExport } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Container from "../components/Container.svelte";
|
||||
|
@ -18,19 +26,24 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
import FieldMapper from "./FieldMapper.svelte";
|
||||
import Header from "./Header.svelte";
|
||||
import HtmlSwitch from "./HtmlSwitch.svelte";
|
||||
import { getColumnOptions, getCsvMetadata } from "./lib";
|
||||
import {
|
||||
buildDeckOneof,
|
||||
buildNotetypeOneof,
|
||||
getColumnOptions,
|
||||
tryGetDeckId,
|
||||
tryGetGlobalNotetype,
|
||||
} from "./lib";
|
||||
import NotetypeSelector from "./NotetypeSelector.svelte";
|
||||
import Preview from "./Preview.svelte";
|
||||
import StickyHeader from "./StickyHeader.svelte";
|
||||
import Tags from "./Tags.svelte";
|
||||
|
||||
export let path: string;
|
||||
export let notetypeNameIds: Notetypes.NotetypeNameId[];
|
||||
export let deckNameIds: Decks.DeckNameId[];
|
||||
export let dupeResolution: ImportExport.CsvMetadata.DupeResolution;
|
||||
export let matchScope: ImportExport.CsvMetadata.MatchScope;
|
||||
|
||||
export let delimiter: ImportExport.CsvMetadata.Delimiter;
|
||||
export let notetypeNameIds: NotetypeNameId[];
|
||||
export let deckNameIds: DeckNameId[];
|
||||
export let dupeResolution: CsvMetadata_DupeResolution;
|
||||
export let matchScope: CsvMetadata_MatchScope;
|
||||
export let delimiter: CsvMetadata_Delimiter;
|
||||
export let forceDelimiter: boolean;
|
||||
export let forceIsHtml: boolean;
|
||||
export let isHtml: boolean;
|
||||
|
@ -39,11 +52,11 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
export let columnLabels: string[];
|
||||
export let tagsColumn: number;
|
||||
export let guidColumn: number;
|
||||
export let preview: Generic.StringList[];
|
||||
export let preview: StringList[];
|
||||
// Protobuf oneofs. Exactly one of these pairs is expected to be set.
|
||||
export let notetypeColumn: number | null;
|
||||
export let globalNotetype: ImportExport.CsvMetadata.MappedNotetype | null;
|
||||
export let deckId: number | null;
|
||||
export let globalNotetype: CsvMetadata_MappedNotetype | null;
|
||||
export let deckId: bigint | null;
|
||||
export let deckColumn: number | null;
|
||||
|
||||
let lastNotetypeId = globalNotetype?.id;
|
||||
|
@ -56,45 +69,51 @@ License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
|||
deckColumn,
|
||||
guidColumn,
|
||||
);
|
||||
$: getCsvMetadata(path, delimiter, undefined, undefined, isHtml).then((meta) => {
|
||||
$: getCsvMetadata({
|
||||
path,
|
||||
delimiter,
|
||||
notetypeId: undefined,
|
||||
deckId: undefined,
|
||||
isHtml,
|
||||
}).then((meta) => {
|
||||
columnLabels = meta.columnLabels;
|
||||
preview = meta.preview;
|
||||
});
|
||||
$: if (globalNotetype?.id !== lastNotetypeId || delimiter !== lastDelimeter) {
|
||||
lastNotetypeId = globalNotetype?.id;
|
||||
lastDelimeter = delimiter;
|
||||
getCsvMetadata(path, delimiter, globalNotetype?.id, deckId || undefined).then(
|
||||
(meta) => {
|
||||
globalNotetype = meta.globalNotetype ?? null;
|
||||
deckId = meta.deckId ?? null;
|
||||
tagsColumn = meta.tagsColumn;
|
||||
},
|
||||
);
|
||||
getCsvMetadata({
|
||||
path,
|
||||
delimiter,
|
||||
notetypeId: globalNotetype?.id,
|
||||
deckId: deckId ?? undefined,
|
||||
}).then((meta) => {
|
||||
globalNotetype = tryGetGlobalNotetype(meta);
|
||||
deckId = tryGetDeckId(meta);
|
||||
tagsColumn = meta.tagsColumn;
|
||||
});
|
||||
}
|
||||
|
||||
async function onImport(): Promise<void> {
|
||||
await importExport.importCsv(
|
||||
ImportExport.ImportCsvRequest.create({
|
||||
path,
|
||||
metadata: ImportExport.CsvMetadata.create({
|
||||
dupeResolution,
|
||||
matchScope,
|
||||
delimiter,
|
||||
forceDelimiter,
|
||||
isHtml,
|
||||
forceIsHtml,
|
||||
globalTags,
|
||||
updatedTags,
|
||||
columnLabels,
|
||||
tagsColumn,
|
||||
guidColumn,
|
||||
notetypeColumn,
|
||||
globalNotetype,
|
||||
deckColumn,
|
||||
deckId,
|
||||
}),
|
||||
}),
|
||||
);
|
||||
await importCsv({
|
||||
path,
|
||||
metadata: {
|
||||
dupeResolution,
|
||||
matchScope,
|
||||
delimiter,
|
||||
forceDelimiter,
|
||||
isHtml,
|
||||
forceIsHtml,
|
||||
globalTags,
|
||||
updatedTags,
|
||||
columnLabels,
|
||||
tagsColumn,
|
||||
guidColumn,
|
||||
deck: buildDeckOneof(deckColumn, deckId),
|
||||
notetype: buildNotetypeOneof(globalNotetype, notetypeColumn),
|
||||
preview: [],
|
||||
},
|
||||
});
|
||||
}
|
||||
</script>
|
||||
|
||||
|
|
|
@ -3,16 +3,16 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { NotetypeNameId } from "@tslib/anki/notetypes_pb";
|
||||
import * as tr from "@tslib/ftl";
|
||||
import type { Notetypes } from "@tslib/proto";
|
||||
|
||||
import Col from "../components/Col.svelte";
|
||||
import Row from "../components/Row.svelte";
|
||||
import Select from "../components/Select.svelte";
|
||||
import SelectOption from "../components/SelectOption.svelte";
|
||||
|
||||
export let notetypeNameIds: Notetypes.NotetypeNameId[];
|
||||
export let notetypeId: number;
|
||||
export let notetypeNameIds: NotetypeNameId[];
|
||||
export let notetypeId: bigint;
|
||||
|
||||
$: label = notetypeNameIds.find((n) => n.id === notetypeId)?.name;
|
||||
</script>
|
||||
|
|
|
@ -3,12 +3,12 @@ Copyright: Ankitects Pty Ltd and contributors
|
|||
License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
|
||||
-->
|
||||
<script lang="ts">
|
||||
import type { Generic } from "@tslib/proto";
|
||||
import type { StringList } from "@tslib/anki/generic_pb";
|
||||
|
||||
import type { ColumnOption } from "./lib";
|
||||
|
||||
export let columnOptions: ColumnOption[];
|
||||
export let preview: Generic.StringList[];
|
||||
export let preview: StringList[];
|
||||
</script>
|
||||
|
||||
<div class="outer">
|
||||
|
|
|
@ -3,21 +3,15 @@
|
|||
|
||||
import "./import-csv-base.scss";
|
||||
|
||||
import { getDeckNames } from "@tslib/anki/decks_service";
|
||||
import { getCsvMetadata } from "@tslib/anki/import_export_service";
|
||||
import { getNotetypeNames } from "@tslib/anki/notetypes_service";
|
||||
import { ModuleName, setupI18n } from "@tslib/i18n";
|
||||
import { checkNightMode } from "@tslib/nightmode";
|
||||
import type { ImportExport, Notetypes } from "@tslib/proto";
|
||||
import { Decks, decks as decksService, empty, notetypes as notetypeService } from "@tslib/proto";
|
||||
|
||||
import ImportCsvPage from "./ImportCsvPage.svelte";
|
||||
import { getCsvMetadata } from "./lib";
|
||||
import { tryGetDeckColumn, tryGetDeckId, tryGetGlobalNotetype, tryGetNotetypeColumn } from "./lib";
|
||||
|
||||
const gettingNotetypes = notetypeService.getNotetypeNames(empty);
|
||||
const gettingDecks = decksService.getDeckNames(
|
||||
Decks.GetDeckNamesRequest.create({
|
||||
skipEmptyDefault: false,
|
||||
includeFiltered: false,
|
||||
}),
|
||||
);
|
||||
const i18n = setupI18n({
|
||||
modules: [
|
||||
ModuleName.ACTIONS,
|
||||
|
@ -32,22 +26,15 @@ const i18n = setupI18n({
|
|||
});
|
||||
|
||||
export async function setupImportCsvPage(path: string): Promise<ImportCsvPage> {
|
||||
const gettingMetadata = getCsvMetadata(path);
|
||||
|
||||
let notetypes: Notetypes.NotetypeNames;
|
||||
let decks: Decks.DeckNames;
|
||||
let metadata: ImportExport.CsvMetadata;
|
||||
try {
|
||||
[notetypes, decks, metadata] = await Promise.all([
|
||||
gettingNotetypes,
|
||||
gettingDecks,
|
||||
gettingMetadata,
|
||||
i18n,
|
||||
]);
|
||||
} catch (err) {
|
||||
alert(err);
|
||||
throw (err);
|
||||
}
|
||||
const [notetypes, decks, metadata, _i18n] = await Promise.all([
|
||||
getNotetypeNames({}),
|
||||
getDeckNames({
|
||||
skipEmptyDefault: false,
|
||||
includeFiltered: false,
|
||||
}),
|
||||
getCsvMetadata({ path }),
|
||||
i18n,
|
||||
]);
|
||||
|
||||
checkNightMode();
|
||||
|
||||
|
@ -68,13 +55,13 @@ export async function setupImportCsvPage(path: string): Promise<ImportCsvPage> {
|
|||
columnLabels: metadata.columnLabels,
|
||||
tagsColumn: metadata.tagsColumn,
|
||||
guidColumn: metadata.guidColumn,
|
||||
globalNotetype: metadata.globalNotetype ?? null,
|
||||
preview: metadata.preview,
|
||||
globalNotetype: tryGetGlobalNotetype(metadata),
|
||||
// Unset oneof numbers default to 0, which also means n/a here,
|
||||
// but it's vital to differentiate between unset and 0 when reserializing.
|
||||
notetypeColumn: metadata.notetypeColumn ? metadata.notetypeColumn : null,
|
||||
deckId: metadata.deckId ? metadata.deckId : null,
|
||||
deckColumn: metadata.deckColumn ? metadata.deckColumn : null,
|
||||
notetypeColumn: tryGetNotetypeColumn(metadata),
|
||||
deckId: tryGetDeckId(metadata),
|
||||
deckColumn: tryGetDeckColumn(metadata),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
|
|
@@ -1,8 +1,8 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import type { CsvMetadata, CsvMetadata_MappedNotetype } from "@tslib/anki/import_export_pb";
import * as tr from "@tslib/ftl";
import { ImportExport, importExport, Notetypes, notetypes as notetypeService } from "@tslib/proto";

export interface ColumnOption {
label: string;

@@ -50,26 +50,42 @@ function columnOption(
};
}

export async function getNotetypeFields(notetypeId: number): Promise<string[]> {
return notetypeService
.getFieldNames(Notetypes.NotetypeId.create({ ntid: notetypeId }))
.then((list) => list.vals);
export function tryGetGlobalNotetype(meta: CsvMetadata): CsvMetadata_MappedNotetype | null {
return meta.notetype.case === "globalNotetype" ? meta.notetype.value : null;
}

export async function getCsvMetadata(
path: string,
delimiter?: ImportExport.CsvMetadata.Delimiter,
notetypeId?: number,
deckId?: number,
isHtml?: boolean,
): Promise<ImportExport.CsvMetadata> {
return importExport.getCsvMetadata(
ImportExport.CsvMetadataRequest.create({
path,
delimiter,
notetypeId,
deckId,
isHtml,
}),
);
export function tryGetDeckId(meta: CsvMetadata): bigint | null {
return meta.deck.case === "deckId" ? meta.deck.value : null;
}

export function tryGetDeckColumn(meta: CsvMetadata): number | null {
return meta.deck.case === "deckColumn" ? meta.deck.value : null;
}

export function tryGetNotetypeColumn(meta: CsvMetadata): number | null {
return meta.notetype.case === "notetypeColumn" ? meta.notetype.value : null;
}

export function buildDeckOneof(
deckColumn: number | null,
deckId: bigint | null,
): CsvMetadata["deck"] {
if (deckColumn !== null) {
return { case: "deckColumn", value: deckColumn };
} else if (deckId !== null) {
return { case: "deckId", value: deckId };
}
throw new Error("missing column/id");
}

export function buildNotetypeOneof(
globalNotetype: CsvMetadata_MappedNotetype | null,
notetypeColumn: number | null,
): CsvMetadata["notetype"] {
if (globalNotetype !== null) {
return { case: "globalNotetype", value: globalNotetype };
} else if (notetypeColumn !== null) {
return { case: "notetypeColumn", value: notetypeColumn };
}
throw new Error("missing column/id");
}
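The helpers above exist because a protobuf-es oneof distinguishes "not set" from "set to 0", which the flat fields used previously did not. A small example; the construction details are a sketch, with CsvMetadata taken from the @tslib/anki/import_export_pb module imported above:

import { CsvMetadata } from "@tslib/anki/import_export_pb";

const unset = new CsvMetadata({});
console.assert(unset.deck.case === undefined); // no deck selection at all

const explicit = new CsvMetadata({ deck: { case: "deckColumn", value: 0 } });
console.assert(explicit.deck.case === "deckColumn"); // column 0, but deliberately set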
|
|
@ -4,8 +4,8 @@
|
|||
import "intl-pluralrules";
|
||||
|
||||
import { FluentBundle, FluentResource } from "@fluent/bundle";
|
||||
import { i18nResources } from "@tslib/anki/i18n_service";
|
||||
|
||||
import { I18n, i18n } from "../proto";
|
||||
import { firstLanguage, setBundles } from "./bundles";
|
||||
import type { ModuleName } from "./modules";
|
||||
|
||||
|
@ -75,7 +75,7 @@ export function withoutUnicodeIsolation(s: string): string {
|
|||
}
|
||||
|
||||
export async function setupI18n(args: { modules: ModuleName[] }): Promise<void> {
|
||||
const resources = await i18n.i18nResources(I18n.I18nResourcesRequest.create(args));
|
||||
const resources = await i18nResources(args);
|
||||
const json = JSON.parse(new TextDecoder().decode(resources.json));
|
||||
|
||||
const newBundles: FluentBundle[] = [];
|
||||
|
|
|
@@ -0,0 +1,48 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

export interface PostProtoOptions {
    /** True by default. Shows a dialog with the error message, then rethrows. */
    alertOnError?: boolean;
}

export async function postProto<T>(
    method: string,
    input: { toBinary(): Uint8Array; getType(): { typeName: string } },
    outputType: { fromBinary(arr: Uint8Array): T },
    { alertOnError = true }: PostProtoOptions,
): Promise<T> {
    try {
        const inputBytes = input.toBinary();
        const path = `/_anki/${method}`;
        const outputBytes = await postProtoInner(path, inputBytes);
        return outputType.fromBinary(outputBytes);
    } catch (err) {
        if (alertOnError) {
            alert(err);
        }
        throw err;
    }
}

async function postProtoInner(url: string, body: Uint8Array): Promise<Uint8Array> {
    const result = await fetch(url, {
        method: "POST",
        headers: {
            "Content-Type": "application/octet-stream",
        },
        body,
    });
    if (!result.ok) {
        let msg = "something went wrong";
        try {
            msg = await result.text();
        } catch {
            // ignore
        }
        throw new Error(`${result.status}: ${msg}`);
    }
    const blob = await result.blob();
    const respBuf = await new Response(blob).arrayBuffer();
    return new Uint8Array(respBuf);
}
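For orientation, a hedged sketch of how a generated service wrapper might call `postProto()`. The module paths, the use of `Empty` as the request type, and the exact generated signature are assumptions for illustration, not code taken from this PR.

```ts
import type { PlainMessage } from "@bufbuild/protobuf";

import { Empty } from "./anki/generic_pb";
import { SchedulingStatesWithContext } from "./anki/scheduler_pb";
import { postProto } from "./post";

// Build the request message from a plain object, post it to /_anki/<method>,
// and decode the typed response via the output message's fromBinary().
export async function getSchedulingStatesWithContext(
    input: PlainMessage<Empty>,
    options: { alertOnError?: boolean } = {},
): Promise<SchedulingStatesWithContext> {
    return postProto(
        "getSchedulingStatesWithContext",
        new Empty(input),
        SchedulingStatesWithContext,
        options,
    );
}
```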
@@ -1,26 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

export async function postRequest(
    path: string,
    body: string | Uint8Array,
    headers: Record<string, string> = {},
): Promise<Uint8Array> {
    if (body instanceof Uint8Array) {
        headers["Content-type"] = "application/octet-stream";
    }
    const resp = await fetch(path, {
        method: "POST",
        headers,
        body,
    });
    if (!resp.ok) {
        const body = await resp.text();
        throw Error(`${resp.status}: ${body}`);
    }
    // get returned bytes
    const respBlob = await resp.blob();
    const respBuf = await new Response(respBlob).arrayBuffer();
    const bytes = new Uint8Array(respBuf);
    return bytes;
}
@@ -1,94 +0,0 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

/* eslint
@typescript-eslint/no-explicit-any: "off",
*/

import type { Message, rpc, RPCImpl, RPCImplCallback } from "protobufjs";

import { anki } from "../../out/ts/lib/backend_proto";

import Cards = anki.cards;
import Collection = anki.collection;
import DeckConfig = anki.deckconfig;
import Decks = anki.decks;
import Generic = anki.generic;
import I18n = anki.i18n;
import ImageOcclusion = anki.image_occlusion;
import ImportExport = anki.import_export;
import Notes = anki.notes;
import Notetypes = anki.notetypes;
import Scheduler = anki.scheduler;
import Stats = anki.stats;
import Tags = anki.tags;

export { Cards, Collection, Decks, Generic, Notes };

export const empty = Generic.Empty.create();

export class InternalError extends Error {}

async function serviceCallback(
    method: rpc.ServiceMethod<Message<any>, Message<any>>,
    requestData: Uint8Array,
    callback: RPCImplCallback,
): Promise<void> {
    const headers = new Headers();
    headers.set("Content-type", "application/octet-stream");

    const methodName = method.name[0].toLowerCase() + method.name.substring(1);
    const path = `/_anki/${methodName}`;

    try {
        const result = await fetch(path, {
            method: "POST",
            headers,
            body: requestData,
        });

        if (result.status == 500) {
            callback(new InternalError(await result.text()), null);
            return;
        }

        const blob = await result.blob();
        const respBuf = await new Response(blob).arrayBuffer();
        const uint8Array = new Uint8Array(respBuf);

        callback(null, uint8Array);
    } catch (error) {
        console.log("error caught");
        callback(error as Error, null);
    }
}

export const decks = Decks.DecksService.create(serviceCallback as RPCImpl);

export { DeckConfig };
export const deckConfig = DeckConfig.DeckConfigService.create(
    serviceCallback as RPCImpl,
);

export { I18n };
export const i18n = I18n.I18nService.create(serviceCallback as RPCImpl);

export { ImportExport };
export const importExport = ImportExport.ImportExportService.create(
    serviceCallback as RPCImpl,
);

export { Notetypes };
export const notetypes = Notetypes.NotetypesService.create(serviceCallback as RPCImpl);

export { Scheduler };
export const scheduler = Scheduler.SchedulerService.create(serviceCallback as RPCImpl);

export { Stats };
export const stats = Stats.StatsService.create(serviceCallback as RPCImpl);

export { Tags };
export const tags = Tags.TagsService.create(serviceCallback as RPCImpl);

export { ImageOcclusion };
export const imageOcclusion = ImageOcclusion.ImageOcclusionService.create(serviceCallback as RPCImpl);
ts/licenses.json
@@ -1,4 +1,10 @@
 {
+    "@bufbuild/protobuf@1.2.1": {
+        "licenses": "(Apache-2.0 AND BSD-3-Clause)",
+        "repository": "https://github.com/bufbuild/protobuf-es",
+        "path": "node_modules/@bufbuild/protobuf",
+        "licenseFile": "node_modules/@bufbuild/protobuf/README.md"
+    },
     "@floating-ui/core@0.5.1": {
         "licenses": "MIT",
         "repository": "https://github.com/floating-ui/floating-ui",

@@ -44,86 +50,6 @@
         "path": "node_modules/@popperjs/core",
         "licenseFile": "node_modules/@popperjs/core/LICENSE.md"
     },
-    "@protobufjs/aspromise@1.1.2": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/aspromise",
-        "licenseFile": "node_modules/@protobufjs/aspromise/LICENSE"
-    },
-    "@protobufjs/base64@1.1.2": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/base64",
-        "licenseFile": "node_modules/@protobufjs/base64/LICENSE"
-    },
-    "@protobufjs/codegen@2.0.4": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/codegen",
-        "licenseFile": "node_modules/@protobufjs/codegen/LICENSE"
-    },
-    "@protobufjs/eventemitter@1.1.0": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/eventemitter",
-        "licenseFile": "node_modules/@protobufjs/eventemitter/LICENSE"
-    },
-    "@protobufjs/fetch@1.1.0": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/fetch",
-        "licenseFile": "node_modules/@protobufjs/fetch/LICENSE"
-    },
-    "@protobufjs/float@1.0.2": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/float",
-        "licenseFile": "node_modules/@protobufjs/float/LICENSE"
-    },
-    "@protobufjs/inquire@1.1.0": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/inquire",
-        "licenseFile": "node_modules/@protobufjs/inquire/LICENSE"
-    },
-    "@protobufjs/path@1.1.2": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/path",
-        "licenseFile": "node_modules/@protobufjs/path/LICENSE"
-    },
-    "@protobufjs/pool@1.1.0": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/pool",
-        "licenseFile": "node_modules/@protobufjs/pool/LICENSE"
-    },
-    "@protobufjs/utf8@1.1.0": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/dcodeIO/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/@protobufjs/utf8",
-        "licenseFile": "node_modules/@protobufjs/utf8/LICENSE"
-    },
     "@tootallnate/once@2.0.0": {
         "licenses": "MIT",
         "repository": "https://github.com/TooTallNate/once",

@@ -151,12 +77,6 @@
         "path": "node_modules/@types/marked",
         "licenseFile": "node_modules/@types/marked/LICENSE"
     },
-    "@types/node@18.11.18": {
-        "licenses": "MIT",
-        "repository": "https://github.com/DefinitelyTyped/DefinitelyTyped",
-        "path": "node_modules/protobufjs/node_modules/@types/node",
-        "licenseFile": "node_modules/protobufjs/node_modules/@types/node/LICENSE"
-    },
     "abab@2.0.5": {
         "licenses": "BSD-3-Clause",
         "repository": "https://github.com/jsdom/abab",

@@ -189,14 +109,14 @@
     "acorn@7.4.1": {
         "licenses": "MIT",
         "repository": "https://github.com/acornjs/acorn",
-        "path": "node_modules/acorn-globals/node_modules/acorn",
-        "licenseFile": "node_modules/acorn-globals/node_modules/acorn/LICENSE"
+        "path": "node_modules/acorn",
+        "licenseFile": "node_modules/acorn/LICENSE"
     },
     "acorn@8.7.0": {
         "licenses": "MIT",
         "repository": "https://github.com/acornjs/acorn",
-        "path": "node_modules/acorn",
-        "licenseFile": "node_modules/acorn/LICENSE"
+        "path": "node_modules/jsdom/node_modules/acorn",
+        "licenseFile": "node_modules/jsdom/node_modules/acorn/LICENSE"
     },
     "agent-base@6.0.2": {
         "licenses": "MIT",

@@ -963,8 +883,8 @@
         "repository": "https://github.com/gkz/levn",
         "publisher": "George Zahariev",
         "email": "z@georgezahariev.com",
-        "path": "node_modules/optionator/node_modules/levn",
-        "licenseFile": "node_modules/optionator/node_modules/levn/LICENSE"
+        "path": "node_modules/escodegen/node_modules/levn",
+        "licenseFile": "node_modules/escodegen/node_modules/levn/LICENSE"
     },
     "lodash-es@4.17.21": {
         "licenses": "MIT",

@@ -974,14 +894,6 @@
         "path": "node_modules/lodash-es",
         "licenseFile": "node_modules/lodash-es/LICENSE"
     },
-    "long@5.2.1": {
-        "licenses": "Apache-2.0",
-        "repository": "https://github.com/dcodeIO/long.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode@dcode.io",
-        "path": "node_modules/long",
-        "licenseFile": "node_modules/long/LICENSE"
-    },
     "lru-cache@6.0.0": {
         "licenses": "ISC",
         "repository": "https://github.com/isaacs/node-lru-cache",

@@ -1151,8 +1063,8 @@
         "repository": "https://github.com/gkz/optionator",
         "publisher": "George Zahariev",
         "email": "z@georgezahariev.com",
-        "path": "node_modules/optionator",
-        "licenseFile": "node_modules/optionator/LICENSE"
+        "path": "node_modules/escodegen/node_modules/optionator",
+        "licenseFile": "node_modules/escodegen/node_modules/optionator/LICENSE"
     },
     "panzoom@9.4.3": {
         "licenses": "MIT",

@@ -1187,14 +1099,6 @@
         "path": "node_modules/prelude-ls",
         "licenseFile": "node_modules/prelude-ls/LICENSE"
     },
-    "protobufjs@7.2.1": {
-        "licenses": "BSD-3-Clause",
-        "repository": "https://github.com/protobufjs/protobuf.js",
-        "publisher": "Daniel Wirtz",
-        "email": "dcode+protobufjs@dcode.io",
-        "path": "node_modules/protobufjs",
-        "licenseFile": "node_modules/protobufjs/LICENSE"
-    },
     "psl@1.8.0": {
         "licenses": "MIT",
         "repository": "https://github.com/lupomontero/psl",
@@ -1,29 +0,0 @@
diff --git a/node_modules/protobufjs/src/root.js b/node_modules/protobufjs/src/root.js
index 6067ca6..78d25f2 100644
--- a/node_modules/protobufjs/src/root.js
+++ b/node_modules/protobufjs/src/root.js
@@ -259,7 +259,7 @@ Root.prototype.resolveAll = function resolveAll() {
 };

 // only uppercased (and thus conflict-free) children are exposed, see below
-var exposeRe = /^[A-Z]/;
+var exposeRe = /^[A-Za-z]/;

 /**
  * Handles a deferred declaring extension field by creating a sister field to represent it within its extended type.
diff --git a/node_modules/protobufjs/src/util/minimal.js b/node_modules/protobufjs/src/util/minimal.js
index 35008ec..20394ab 100644
--- a/node_modules/protobufjs/src/util/minimal.js
+++ b/node_modules/protobufjs/src/util/minimal.js
@@ -177,10 +177,7 @@ util.Array = typeof Uint8Array !== "undefined" ? Uint8Array /* istanbul ignore n
  * Long.js's Long class if available.
  * @type {Constructor<Long>}
  */
-util.Long = /* istanbul ignore next */ util.global.dcodeIO && /* istanbul ignore next */ util.global.dcodeIO.Long
-    || /* istanbul ignore next */ util.global.Long
-    || util.inquire("long");
-
+util.Long = null;
 /**
  * Regular expression used to verify 2 bit (`bool`) map keys.
  * @type {RegExp}
@@ -1,8 +1,10 @@
 // Copyright: Ankitects Pty Ltd and contributors
 // License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

-import { postRequest } from "@tslib/postrequest";
-import { Scheduler } from "@tslib/proto";
+import type { JsonValue } from "@bufbuild/protobuf";
+import type { SchedulingContext, SchedulingStatesWithContext } from "@tslib/anki/scheduler_pb";
+import { SchedulingStates } from "@tslib/anki/scheduler_pb";
+import { getSchedulingStatesWithContext, setSchedulingStates } from "@tslib/anki/scheduler_service";

 interface CustomDataStates {
     again: Record<string, unknown>;

@@ -11,21 +13,7 @@ interface CustomDataStates {
     easy: Record<string, unknown>;
 }

-async function getSchedulingStatesWithContext(): Promise<Scheduler.SchedulingStatesWithContext> {
-    return Scheduler.SchedulingStatesWithContext.decode(
-        await postRequest("/_anki/getSchedulingStatesWithContext", ""),
-    );
-}
-
-async function setSchedulingStates(
-    key: string,
-    states: Scheduler.SchedulingStates,
-): Promise<void> {
-    const bytes = Scheduler.SchedulingStates.encode(states).finish();
-    await postRequest("/_anki/setSchedulingStates", bytes, { key });
-}
-
-function unpackCustomData(states: Scheduler.SchedulingStates): CustomDataStates {
+function unpackCustomData(states: SchedulingStates): CustomDataStates {
     const toObject = (s: string): Record<string, unknown> => {
         try {
             return JSON.parse(s);

@@ -42,7 +30,7 @@ function unpackCustomData(states: Scheduler.SchedulingStates): CustomDataStates
 }

 function packCustomData(
-    states: Scheduler.SchedulingStates,
+    states: SchedulingStates,
     customData: CustomDataStates,
 ) {
     states.again!.customData = JSON.stringify(customData.again);

@@ -51,18 +39,34 @@ function packCustomData(
     states.easy!.customData = JSON.stringify(customData.easy);
 }

+type StateMutatorFn = (states: JsonValue, customData: CustomDataStates, ctx: SchedulingContext) => Promise<void>;
+
 export async function mutateNextCardStates(
     key: string,
-    mutator: (
-        states: Scheduler.SchedulingStates,
-        customData: CustomDataStates,
-        ctx: Scheduler.SchedulingContext,
-    ) => Promise<void>,
+    transform: StateMutatorFn,
 ): Promise<void> {
-    const statesWithContext = await getSchedulingStatesWithContext();
-    const states = statesWithContext.states!;
-    const customData = unpackCustomData(states);
-    await mutator(states, customData, statesWithContext.context!);
-    packCustomData(states, customData);
-    await setSchedulingStates(key, states);
+    const statesWithContext = await getSchedulingStatesWithContext({});
+    const updatedStates = await applyStateTransform(statesWithContext, transform);
+    await setSchedulingStates({ key, states: updatedStates });
 }
+
+/** Exported only for tests */
+export async function applyStateTransform(
+    states: SchedulingStatesWithContext,
+    transform: StateMutatorFn,
+): Promise<SchedulingStates> {
+    // convert to JSON, which is the format existing transforms expect
+    const statesJson = states.states!.toJson({ emitDefaultValues: true });
+
+    // decode customData and put it into each state
+    const customData = unpackCustomData(states.states!);
+
+    // run the user function on the JSON
+    await transform(statesJson, customData, states.context!);
+
+    // convert the JSON back into proto form, and pack the custom data in
+    const updatedStates = SchedulingStates.fromJson(statesJson);
+    packCustomData(updatedStates, customData);
+
+    return updatedStates;
+}
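To make the preserved contract concrete, a hedged example (not from the diff) of the kind of user-supplied transform this JSON round-trip keeps working; the deck name, field values, and wrapper function are invented, and the transform sees the plain-JSON form of the states, as in the tests below.

```ts
import { mutateNextCardStates } from "./answering";

// Illustrative transform: lengthen the "good" interval for one deck and stash a
// marker in customData. `states` is the JSON produced by toJson() above.
export async function exampleMutation(key: string): Promise<void> {
    await mutateNextCardStates(key, async (states: any, customData, ctx) => {
        if (ctx.deckName === "Japanese") {
            states.good.normal.review.scheduledDays += 1;
        }
        customData.easy.adjusted = true;
    });
}
```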
@@ -0,0 +1,146 @@
// Copyright: Ankitects Pty Ltd and contributors
// License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html

import { SchedulingContext, SchedulingStates, SchedulingStatesWithContext } from "@tslib/anki/scheduler_pb";

import { applyStateTransform } from "./answering";

/* eslint
@typescript-eslint/no-explicit-any: "off",
*/

function exampleInput(): SchedulingStatesWithContext {
    return SchedulingStatesWithContext.fromJson(
        {
            "states": {
                "current": {
                    "normal": {
                        "review": {
                            "scheduledDays": 1,
                            "elapsedDays": 2,
                            "easeFactor": 1.850000023841858,
                            "lapses": 4,
                            "leeched": false,
                        },
                    },
                    "customData": "{\"v\":\"v3.20.0\",\"seed\":2104,\"d\":5.39,\"s\":11.06}",
                },
                "again": {
                    "normal": {
                        "relearning": {
                            "review": {
                                "scheduledDays": 1,
                                "elapsedDays": 0,
                                "easeFactor": 1.649999976158142,
                                "lapses": 5,
                                "leeched": false,
                            },
                            "learning": {
                                "remainingSteps": 1,
                                "scheduledSecs": 600,
                            },
                        },
                    },
                },
                "hard": {
                    "normal": {
                        "review": {
                            "scheduledDays": 2,
                            "elapsedDays": 0,
                            "easeFactor": 1.7000000476837158,
                            "lapses": 4,
                            "leeched": false,
                        },
                    },
                },
                "good": {
                    "normal": {
                        "review": {
                            "scheduledDays": 4,
                            "elapsedDays": 0,
                            "easeFactor": 1.850000023841858,
                            "lapses": 4,
                            "leeched": false,
                        },
                    },
                },
                "easy": {
                    "normal": {
                        "review": {
                            "scheduledDays": 6,
                            "elapsedDays": 0,
                            "easeFactor": 2,
                            "lapses": 4,
                            "leeched": false,
                        },
                    },
                },
            },
            "context": { "deckName": "hello", "seed": 123 },
        },
    );
}

test("can change oneof", () => {
    let states = exampleInput().states!;
    const jsonStates = states.toJson({ "emitDefaultValues": true }) as any;
    // again should be a relearning state
    const inner = states.again?.kind?.value?.kind;
    assert(inner?.case === "relearning");
    expect(inner.value.learning?.remainingSteps).toBe(1);
    // change it to a review state
    jsonStates.again.normal = { "review": jsonStates.again.normal.relearning.review };
    states = SchedulingStates.fromJson(jsonStates);
    const inner2 = states.again?.kind?.value?.kind;
    assert(inner2?.case === "review");
    // however, it's not valid to have multiple oneofs set
    jsonStates.again.normal = { "review": jsonStates.again.normal.review, "learning": {} };
    expect(() => {
        SchedulingStates.fromJson(jsonStates);
    }).toThrow();
});

test("no-op transform", async () => {
    const input = exampleInput();
    const output = await applyStateTransform(input, async (states: any, customData, ctx) => {
        expect(ctx.deckName).toBe("hello");
        expect(customData.easy.seed).toBe(2104);
        expect(states!.again!.normal!.relearning!.learning!.remainingSteps).toBe(1);
    });
    // the input only has customData set on `current`, so we need to update it
    // before we compare the two as equal
    input.states!.again!.customData = input.states!.current!.customData;
    input.states!.hard!.customData = input.states!.current!.customData;
    input.states!.good!.customData = input.states!.current!.customData;
    input.states!.easy!.customData = input.states!.current!.customData;
    expect(output).toStrictEqual(input.states);
});

test("custom data change", async () => {
    const output = await applyStateTransform(exampleInput(), async (_states: any, customData, _ctx) => {
        customData.easy = { foo: "hello world" };
    });
    expect(output!.hard!.customData).not.toMatch(/hello world/);
    expect(output!.easy!.customData).toBe("{\"foo\":\"hello world\"}");
});

test("adjust interval", async () => {
    const output = await applyStateTransform(exampleInput(), async (states: any, _customData, _ctx) => {
        states.good.normal.review.scheduledDays = 10;
    });
    const kind = output.good?.kind?.value?.kind;
    assert(kind?.case === "review");
    expect(kind.value.scheduledDays).toBe(10);
});

test("default context values exist", async () => {
    const ctx = SchedulingContext.fromBinary(new Uint8Array());
    expect(ctx.deckName).toBe("");
    expect(ctx.seed).toBe(0n);
});

function assert(condition: boolean): asserts condition {
    if (!condition) {
        throw new Error();
    }
}
@@ -5,6 +5,7 @@
     "compilerOptions": {
         // css-browser-selector fails if our output bundle is strict
         "alwaysStrict": false,
-        "composite": false
+        "composite": false,
+        "types": ["jest"]
     }
 }