
update database

main (parent commit 6d2ea0ead7)
6 changed files with 63 additions and 10 deletions
  1. docker-compose.yml (+2 -0)
  2. initdb.d/01-function.sql (+5 -0)
  3. initdb.d/10-wanikani.sql (+33 -0)
  4. initdb.d/22-entry.sql (+13 -3)
  5. initdb.d/24-preset.sql (+6 -6)
  6. initdb.d/27-freq.sql (+4 -1)

docker-compose.yml (+2 -0)

@@ -14,6 +14,8 @@ services:
PGUSER: *pguser
PGPASSWORD: *pgpass
PGDATABASE: *pgdb
WANIKANI_API_KEY: # required to cache to database in advance
ports:
- 5433:5432 # pgAdmin connection port

initdb.d/01-function.sql (+5 -0)

@@ -24,3 +24,8 @@ CREATE OR REPLACE FUNCTION identity (JSONB) RETURNS JSONB AS
$func$
SELECT $1;
$func$ LANGUAGE SQL IMMUTABLE;
CREATE OR REPLACE FUNCTION identity (TEXT) RETURNS TEXT AS
$func$
SELECT $1;
$func$ LANGUAGE SQL IMMUTABLE;
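The new TEXT overload of identity() mirrors the existing JSONB one. Presumably it exists so that a second PGroonga index with different tokenizer/normalizer options can be declared on the same column as an expression index (see "idx_wanikani.subjects.sentences_ja_kana" below) and targeted explicitly in queries. A minimal sketch of that pattern, with an illustrative search term:

-- Hypothetical query: wrapping the column in identity() matches the
-- kana-reading expression index rather than the plain column index.
SELECT "ja"
FROM "wanikani.subjects.sentences"
WHERE identity("ja") &@~ 'かんじ';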

initdb.d/10-wanikani.sql (+33 -0)

@@ -17,6 +17,10 @@ CREATE TABLE "wanikani.subjects" (
);
CREATE INDEX "idx_wanikani.subjects_object" ON "wanikani.subjects" ("object");
CREATE INDEX "idx_wanikani.subjects_data-characters" ON "wanikani.subjects" (("data" ->> 'characters'));
CREATE INDEX "idx_wanikani.subjects_data-level" ON "wanikani.subjects" ((("data" -> 'level')::int));
CREATE INDEX "idx_wanikani.subjects_data" ON "wanikani.subjects"
USING pgroonga("data")
WITH (
@@ -29,3 +33,32 @@ CREATE INDEX "idx_wanikani.subjects_data_kana" ON "wanikani.subjects"
tokenizer='TokenMecab',
normalizer='NormalizerNFKC100("unify_kana", true)'
);
CREATE MATERIALIZED VIEW "wanikani.subjects.sentences" AS (
SELECT
(s ->> 'ja') "ja",
(s ->> 'en') "en"
FROM "wanikani.subjects", jsonb_array_elements("data" -> 'context_sentences') "s"
WHERE s ->> 'ja' > ''
);
CREATE INDEX "idx_wanikani.subjects.sentences_ja" ON "wanikani.subjects.sentences"
USING pgroonga("ja")
WITH (
tokenizer='TokenMecab',
normalizer='NormalizerNFKC100("unify_kana", true)'
);
CREATE INDEX "idx_wanikani.subjects.sentences_ja_kana" ON "wanikani.subjects.sentences"
USING pgroonga((identity("ja")))
WITH (
tokenizer='TokenMecab("use_reading", true)',
normalizer='NormalizerNFKC100("unify_kana", true)'
);
CREATE INDEX "idx_wanikani.subjects.sentences_en" ON "wanikani.subjects.sentences"
USING pgroonga("en")
WITH (
plugins='token_filters/stem',
token_filters='TokenFilterStem'
);
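Since "wanikani.subjects.sentences" is a materialized view, it only reflects whatever was in "wanikani.subjects" at creation time and has to be refreshed after new subjects are cached. The statements below are an illustrative sketch (search terms and the level cutoff are placeholders), not part of the commit:

-- Rebuild the sentence view after (re)caching WaniKani subjects.
REFRESH MATERIALIZED VIEW "wanikani.subjects.sentences";

-- Full-text lookups that can use the PGroonga indexes defined above.
SELECT "ja", "en" FROM "wanikani.subjects.sentences" WHERE "ja" &@~ '漢字' LIMIT 10;
SELECT "ja", "en" FROM "wanikani.subjects.sentences" WHERE "en" &@~ 'kanji' LIMIT 10;

-- Point lookups on characters/level can use the expression indexes above.
SELECT "data" ->> 'characters' AS "characters"
FROM "wanikani.subjects"
WHERE "data" ->> 'characters' = '日' AND ("data" -> 'level')::int <= 10;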

initdb.d/22-entry.sql (+13 -3)

@@ -4,6 +4,10 @@ CREATE TABLE "entry" (
"updatedAt" TIMESTAMPTZ DEFAULT now(),
"userId" UUID NOT NULL REFERENCES "user"("id") ON DELETE CASCADE,
"type" TEXT NOT NULL,
"_type" TEXT[] GENERATED ALWAYS AS (CASE
WHEN "type" = 'character' THEN ARRAY['character', 'hanzi', 'kanji']
ELSE ARRAY["type"]
END) STORED,
"entry" TEXT[] NOT NULL CHECK ("entry"[1] IS NOT NULL),
"reading" JSONB NOT NULL DEFAULT '{}', -- TODO: jsonschema
"translation" JSONB NOT NULL DEFAULT '{}', -- TODO: jsonschema
@@ -11,9 +15,14 @@ CREATE TABLE "entry" (
"tag" TEXT[] NOT NULL DEFAULT '{}',
"level" FLOAT,
"level.kanji" INT,
"frequency" FLOAT,
"key" TEXT UNIQUE,
"_meta" JSONB, -- TODO: jsonschema
"priority" INT GENERATED ALWAYS AS (CASE
WHEN "type" = 'vocabulary' THEN 10
WHEN "type" = 'character' THEN 5
WHEN "type" = 'sentence' THEN 1
ELSE 0
END) STORED,
PRIMARY KEY ("id")
);
@@ -22,12 +31,13 @@ CREATE TRIGGER "t_entry_updatedAt"
FOR EACH ROW
EXECUTE PROCEDURE "f_updatedAt"();
CREATE INDEX "idx_entry_priority" ON "entry" ("priority");
CREATE INDEX "idx_entry_updatedAt" ON "entry" ("updatedAt");
CREATE INDEX "idx_entry_userId" ON "entry" ("userId");
CREATE INDEX "idx_entry_type" ON "entry" ("type");
CREATE INDEX "idx_entry_type" ON "entry" USING GIN("_type");
CREATE INDEX "idx_entry_level" ON "entry" ("level");
CREATE INDEX "idx_entry_level.kanji" ON "entry" ("level.kanji");
CREATE INDEX "idx_entry_frequency" ON "entry" ("frequency");
CREATE INDEX "idx_entry_description" ON "entry"
USING pgroonga("description")

initdb.d/24-preset.sql (+6 -6)

@@ -1,4 +1,4 @@
CREATE TABLE "preset.quiz" (
CREATE TABLE "quiz.preset" (
"id" UUID NOT NULL PRIMARY KEY DEFAULT uuid_generate_v4(),
"createdAt" TIMESTAMPTZ DEFAULT now(),
"updatedAt" TIMESTAMPTZ DEFAULT now(),
@@ -7,12 +7,12 @@ CREATE TABLE "preset.quiz" (
"settings" JSONB NOT NULL
);
CREATE TRIGGER "t_preset.quiz_updatedAt"
BEFORE UPDATE ON "preset.quiz"
CREATE TRIGGER "t_quiz.preset_updatedAt"
BEFORE UPDATE ON "quiz.preset"
FOR EACH ROW
EXECUTE PROCEDURE "f_updatedAt"();
CREATE INDEX "idx_preset.quiz_updatedAt" ON "preset.quiz" ("updatedAt");
CREATE INDEX "idx_preset.quiz_userId" ON "preset.quiz" ("userId");
CREATE INDEX "idx_preset.quiz_name" ON "preset.quiz"
CREATE INDEX "idx_quiz.preset_updatedAt" ON "quiz.preset" ("updatedAt");
CREATE INDEX "idx_quiz.preset_userId" ON "quiz.preset" ("userId");
CREATE INDEX "idx_quiz.preset_name" ON "quiz.preset"
USING pgroonga ("name");

initdb.d/27-freq.sql (+4 -1)

@@ -4,4 +4,7 @@ CREATE TABLE "frequency" (
PRIMARY KEY ("entry")
);
CREATE INDEX "idx_frequency_frequency" ON "frequency"("frequency");
CREATE INDEX "idx_frequency_frequency" ON "frequency" ("frequency");
CREATE INDEX "idx_frequency_entry" ON "frequency"
USING pgroonga("entry")
WITH (normalizer='NormalizerNFKC100("unify_kana", true)');
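The NormalizerNFKC100("unify_kana", true) option should make lookups against "frequency"."entry" insensitive to the hiragana/katakana distinction. An illustrative query (the search term is a placeholder):

-- Hypothetical kana-insensitive lookup using the PGroonga index above.
SELECT "entry", "frequency"
FROM "frequency"
WHERE "entry" &@ 'ことば'
ORDER BY "frequency" DESC
LIMIT 10;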
