with Ada.Characters.Latin_1, Ada.Directories, Ada.Strings.Fixed,
     Ada.Strings.Maps, Ada.Text_IO, CSV, Datatypes, FMD, GNAT.Regpat,
     GNATCOLL.JSON, UnZip;
use Datatypes;

package body Deck_IO is

   package Latin  renames Ada.Characters.Latin_1;
   package FD     renames Ada.Directories;
   package Strfix renames Ada.Strings.Fixed;
   package Strmap renames Ada.Strings.Maps;
   package TIO    renames Ada.Text_IO;
   package Pat    renames GNAT.Regpat;
   package JS     renames GNATCOLL.JSON;

   -- String <-> Unbounded_String shorthands
   function "+" (S : in String) return SU.Unbounded_String
     renames SU.To_Unbounded_String;
   function "-" (US : in SU.Unbounded_String) return String
     renames SU.To_String;

   function To_Unbounded_String (Item : in Field_ID) return SU.Unbounded_String is
   begin
      return SU.Unbounded_String (Item);
   end To_Unbounded_String;

   function To_Unbounded_String (Item : in Datatypes.Field) return SU.Unbounded_String is
   begin
      return SU.Unbounded_String (Item);
   end To_Unbounded_String;

   procedure Finalize (This : in out Deck_Handle) is
   begin
      if This.Opened then
         FD.Delete_File (-This.Tempfile);
      end if;
   end Finalize;

   -- Create and immediately close a temporary file just to obtain a unique
   -- file name for later use.
   function Generate_Temp_Name return String is
      Handle   : TIO.File_Type;
      Filename : SU.Unbounded_String;
   begin
      TIO.Create (File => Handle);
      Filename := +TIO.Name (Handle);
      TIO.Close (Handle);
      return -Filename;
   end Generate_Temp_Name;

   -- A set of notes matches the models when every note refers to a known
   -- model and carries no more fields than that model declares.
   function Matches (Models : in Model_Map; Notes : in Note_Vector) return Boolean is
      use type Ada.Containers.Count_Type;
   begin
      for Note of Notes loop
         if not Models.Contains (Note.Model)
           or else Note.Fields.Length > Models.Element (Note.Model).Fields.Length
         then
            return False;
         end if;
      end loop;
      return True;
   end Matches;

   -- The "media" member of an .apkg archive is a JSON object mapping archive
   -- member names to the original media file names; store the reverse lookup
   -- (file name -> archive member) for extraction later.
   procedure Read_Media_Collection (Filename : in String; Media : out Media_Collection) is
      Temp         : String := Generate_Temp_Name;
      Input_Handle : TIO.File_Type;
      Raw_Data     : SU.Unbounded_String;
      JSON_Data    : JS.JSON_Value;

      procedure Map_Iteration (Name : in JS.UTF8_String; Value : in JS.JSON_Value) is
         Val : SU.Unbounded_String := JS.Get (Value);
      begin
         Media.Map.Insert (Media_Name (Val), Media_ID (+Name));
      end Map_Iteration;
   begin
      UnZip.Extract (Filename, "media", Temp);
      TIO.Open (Input_Handle, TIO.In_File, Temp);
      while not TIO.End_Of_File (Input_Handle) loop
         SU.Append (Raw_Data, TIO.Get_Line (Input_Handle) & Latin.LF);
      end loop;
      TIO.Close (Input_Handle);
      FD.Delete_File (Temp);
      JSON_Data := JS.Read (-Raw_Data);
      JS.Map_JSON_Object (JSON_Data, Map_Iteration'Access);
      Media.Filename := +Filename;
   end Read_Media_Collection;

   procedure Open_Database (Filename : in String; Deck : in out Deck_Handle) is
      Temp : String := Generate_Temp_Name;
   begin
      UnZip.Extract (Filename, "collection.anki2", Temp);
      Deck.SQL_Handle.Open (Temp);
      Deck.Opened := True;
      Deck.Tempfile := +Temp;
   end Open_Database;

   function Is_Open (Deck : in Deck_Handle) return Boolean is
   begin
      return Deck.Opened;
   end Is_Open;

   procedure Close_Database (Deck : in out Deck_Handle) is
   begin
      if not Deck.Opened then
         return;
      end if;
      Deck.SQL_Handle.Close;
      Deck.Opened := False;
      FD.Delete_File (-Deck.Tempfile);
      Deck.Tempfile := +"";
   end Close_Database;

   -- Build a model's field name list, ordered by each field's "ord" index.
   procedure Extract_Field_IDs (Value : in JS.JSON_Array; FIDs : out Field_ID_Vector) is
      Index   : Positive;
      Item    : JS.JSON_Value;
      Ordinal : Integer;
   begin
      Index := JS.Array_First (Value);
      while JS.Array_Has_Element (Value, Index) loop
         Item := JS.Array_Element (Value, Index);
         Ordinal := JS.Get (Item, "ord") + 1;
         if Ordinal > Integer (FIDs.Length) then
            FIDs.Set_Length (Ada.Containers.Count_Type (Ordinal));
         end if;
         declare
            Name : String := JS.Get (Item, "name");
         begin
            FIDs.Replace_Element (Field_Ordinal (Ordinal), Field_ID (+Name));
         end;
         Index := Index + 1;
      end loop;
   end Extract_Field_IDs;
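   -- Anki card templates reference note fields with mustache-style markers
   -- such as "{{Front}}" or "{{hint:Back}}"; the built-in "{{FrontSide}}"
   -- marker is not a note field and is skipped.  A minimal sketch of the
   -- behaviour (the field names below are illustrative, not from a real deck):
   --
   --    Regex_Fields ("<p>{{Front}}</p><hr>{{hint:Back}}", FIDs);
   --    -- FIDs now holds "Front" and "Back"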
   procedure Regex_Fields (Raw_Data : in JS.UTF8_String; FIDs : out Field_ID_Vector) is
      use type Pat.Match_Location;
      Matches : Pat.Match_Array (0 .. 1);
      Regexp  : Pat.Pattern_Matcher :=
        -- My god I hate regular expressions, they're such unreadable nonsense
        Pat.Compile ("{{(?:(?:\w|\s)+:)?((?:\w|\s)+)}}", Pat.Single_Line);
      Marker  : Positive := Raw_Data'First;
   begin
      loop
         Pat.Match (Regexp, Raw_Data, Matches, Marker);
         exit when Matches (1) = Pat.No_Match;
         declare
            Item : String := Raw_Data (Matches (1).First .. Matches (1).Last);
         begin
            if Item /= "FrontSide" then
               FIDs.Append (Field_ID (+Item));
            end if;
         end;
         Marker := Matches (0).Last + 1;
      end loop;
   end Regex_Fields;

   -- One template per card type; collect the fields used on the question and
   -- answer sides of each.
   procedure Extract_Templates (Value : in JS.JSON_Array; TMPLs : out Template_Vector) is
      Index        : Positive;
      Item         : JS.JSON_Value;
      Current_Tmpl : Template;
   begin
      Index := JS.Array_First (Value);
      while JS.Array_Has_Element (Value, Index) loop
         Item := JS.Array_Element (Value, Index);
         Regex_Fields (JS.Get (Item, "qfmt"), Current_Tmpl.Question);
         Regex_Fields (JS.Get (Item, "afmt"), Current_Tmpl.Answer);
         TMPLs.Append (Current_Tmpl);
         Current_Tmpl.Question.Clear;
         Current_Tmpl.Answer.Clear;
         Index := Index + 1;
      end loop;
   end Extract_Templates;

   procedure Extract_Model
     (Name : in JS.UTF8_String; Value : in JS.JSON_Value; Models : in out Model_Map)
   is
      Current_Model : Model;
   begin
      Extract_Field_IDs (JS.Get (Value, "flds"), Current_Model.Fields);
      Extract_Templates (JS.Get (Value, "tmpls"), Current_Model.Templates);
      Models.Insert (Model_ID (+Name), Current_Model);
   end Extract_Model;

   -- The whole model catalogue is stored as a single JSON object in the
   -- "models" column of the "col" table.
   procedure Query_Models (Deck : in out Deck_Handle; Models : out Model_Map) is
      Statement : SQLite3.SQLite3_Statement;
      Raw_Data  : SU.Unbounded_String;
      JSON_Data : JS.JSON_Value;

      procedure JSON_Callback (Name : in JS.UTF8_String; Value : in JS.JSON_Value) is
      begin
         Extract_Model (Name, Value, Models);
      end JSON_Callback;
   begin
      Deck.SQL_Handle.Prepare ("SELECT models FROM col", Statement);
      Statement.Step;
      Raw_Data := Statement.Column (0);
      JSON_Data := JS.Read (-Raw_Data);
      JS.Map_JSON_Object (JSON_Data, JSON_Callback'Access);
   end Query_Models;

   -- A note's fields are stored in a single column, separated by the ASCII
   -- unit separator (character 31).
   procedure Tokenize_Fields (Raw_Data : in SU.Unbounded_String; Fields : out Field_Vector) is
      Charset  : Strmap.Character_Set := Strmap.To_Set (Latin.US);
      Position : Positive := 1;
      Next     : Natural  := 1;
   begin
      while Next /= 0 and Position <= SU.Length (Raw_Data) loop
         Next := SU.Index (Raw_Data, Charset, Position);
         if Position <= Next then
            Fields.Append (Datatypes.Field (SU.Unbounded_Slice (Raw_Data, Position, Next - 1)));
         else
            Fields.Append (Datatypes.Field (SU.Unbounded_Slice (Raw_Data, Position, SU.Length (Raw_Data))));
         end if;
         Position := Next + 1;
      end loop;
   end Tokenize_Fields;

   procedure Query_Notes (Deck : in out Deck_Handle; Notes : out Note_Vector) is
      use type SQLite3.Status_Code;
      Statement    : SQLite3.SQLite3_Statement;
      Current_Note : Note;
      Raw_Model    : SU.Unbounded_String;
   begin
      Deck.SQL_Handle.Prepare ("SELECT mid, flds FROM notes", Statement);
      loop
         Statement.Step;
         exit when Statement.Status /= SQLite3.SQLITE_ROW;
         Raw_Model := Statement.Column (0);
         Current_Note.Model := Model_ID (Raw_Model);
         Tokenize_Fields (Statement.Column (1), Current_Note.Fields);
         Notes.Append (Current_Note);
         Current_Note.Fields.Clear;
      end loop;
   end Query_Notes;
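   -- Output names are derived from the requested basename, one file (and one
   -- media directory) per model.  The first model keeps the plain basename,
   -- later ones get a numeric suffix.  Illustrative results, assuming a
   -- Unix-style Ada.Directories.Compose:
   --
   --    Compose_Name ("out", "deck", "csv", 1)  =>  "out/deck.csv"
   --    Compose_Name ("out", "deck", "csv", 2)  =>  "out/deck-2.csv"
   --    Compose_Dir  ("out", "deck", 3)         =>  "out/deck-3"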
   function Compose_Name
     (Directory : in String;
      Basename  : in String;
      Extension : in String;
      Model_Num : in Positive) return SU.Unbounded_String
   is
      Result : SU.Unbounded_String := +FD.Compose (Directory, Basename);
   begin
      if Model_Num > 1 then
         SU.Append (Result, "-" & Strfix.Trim (Integer'Image (Model_Num), Ada.Strings.Left));
      end if;
      SU.Append (Result, "." & Extension);
      return Result;
   end Compose_Name;

   function Compose_Dir
     (Directory : in String;
      Basename  : in String;
      Model_Num : in Positive) return SU.Unbounded_String
   is
      Result : SU.Unbounded_String := +FD.Compose (Directory, Basename);
   begin
      if Model_Num > 1 then
         SU.Append (Result, "-" & Strfix.Trim (Integer'Image (Model_Num), Ada.Strings.Left));
      end if;
      return Result;
   end Compose_Dir;

   -- Extract every image referenced by an "<img src=...>" tag into Outdir and
   -- prefix the matched file name with "%%/" in the note text.
   procedure Extract_Images
     (Texts  : in out Datatypes.Field_Vector;
      Outdir : in SU.Unbounded_String;
      Media  : in Media_Collection)
   is
      use type Pat.Match_Location;
      Matches : Pat.Match_Array (0 .. 2);
      Regexp  : Pat.Pattern_Matcher := Pat.Compile
        -- Absolutely disgusting regexes.  NOTE: the pattern below is a
        -- reconstruction (the original was lost); it is assumed to match an
        -- <img> tag whose src value is either quoted (group 1) or bare
        -- (group 2).
        ("<img src=(?:""([^""]+)""|([^ >]+))[^>]*>", Pat.Single_Line);
      Marker     : Positive;
      Location   : Pat.Match_Location;
      Match_Name : SU.Unbounded_String;
   begin
      for Text of Texts loop
         Marker := 1;
         loop
            Pat.Match (Regexp, -(SU.Unbounded_String (Text)), Matches, Marker);
            exit when Matches (0) = Pat.No_Match;
            Location := (if Matches (1) = Pat.No_Match then Matches (2) else Matches (1));
            Match_Name :=
              SU.Unbounded_Slice (SU.Unbounded_String (Text), Location.First, Location.Last);
            if Media.Map.Contains (Media_Name (Match_Name)) then
               if not FD.Exists (-Outdir) then
                  FD.Create_Directory (-Outdir);
               end if;
               if not FD.Exists (FD.Compose (-Outdir, -Match_Name)) then
                  UnZip.Extract
                    (-Media.Filename,
                     SU.To_String (SU.Unbounded_String (Media.Map.Element (Media_Name (Match_Name)))),
                     FD.Compose (-Outdir, -Match_Name));
               end if;
               SU.Insert (SU.Unbounded_String (Text), Location.First, "%%/");
            end if;
            Marker := Matches (0).Last + 1;
         end loop;
      end loop;
   end Extract_Images;

   -- One CSV file per model: a header row of field names, then one row per
   -- note that uses that model.
   procedure Write_CSV
     (Directory : in String;
      Basename  : in String;
      Models    : in Model_Map;
      Notes     : in Note_Vector;
      Overwrite : in Boolean := False)
   is
      package My_CSV is new CSV;
      Counter     : Positive := 1;
      Outname     : SU.Unbounded_String;
      File_Handle : TIO.File_Type;
      Row_Size    : Positive;
   begin
      for C in Models.Iterate loop
         Outname := Compose_Name (Directory, Basename, "csv", Counter);
         if FD.Exists (-Outname) then
            if not Overwrite then
               raise FD.Name_Error with "output file " & (-Outname) & " already exists";
            else
               FD.Delete_File (-Outname);
            end if;
         end if;
         TIO.Create (File_Handle, TIO.Out_File, -Outname);
         Row_Size := Positive (Model_Maps.Element (C).Fields.Length);
         My_CSV.Put_Header (File_Handle, Model_Maps.Element (C).Fields);
         for N of Notes loop
            if N.Model = Model_Maps.Key (C) then
               My_CSV.Put_Row (File_Handle, N.Fields, Row_Size);
            end if;
         end loop;
         TIO.Close (File_Handle);
         Counter := Counter + 1;
      end loop;
   end Write_CSV;
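   -- Write_FMD mirrors Write_CSV but emits the FMD format: per model it
   -- writes a header, the model's field names, one "pack" per card template
   -- (the question and answer field lists), then one entry per note with any
   -- referenced images extracted alongside the file, and finally a footer.
   -- The exact on-disk layout is defined by the FMD package.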
   procedure Write_FMD
     (Directory : in String;
      Basename  : in String;
      Models    : in Model_Map;
      Notes     : in Note_Vector;
      Media     : in Media_Collection;
      Overwrite : in Boolean := False)
   is
      Counter         : Positive := 1;
      Outname, Outdir : SU.Unbounded_String;
      File_Handle     : TIO.File_Type;
      Current_Entry   : Datatypes.Field_Vector;
      Entry_Size      : Positive;
   begin
      for C in Models.Iterate loop
         Outname := Compose_Name (Directory, Basename, "fmd", Counter);
         Outdir  := Compose_Dir (Directory, Basename, Counter);
         if FD.Exists (-Outname) then
            if not Overwrite then
               raise FD.Name_Error with "output file " & (-Outname) & " already exists";
            else
               FD.Delete_File (-Outname);
            end if;
         end if;
         if FD.Exists (-Outdir) and not Overwrite then
            raise FD.Name_Error with "media directory " & (-Outdir) & " already exists";
         end if;
         TIO.Create (File_Handle, TIO.Out_File, -Outname);
         Entry_Size := Positive (Model_Maps.Element (C).Fields.Length);
         FMD.Put_Header (File_Handle);
         FMD.Put_Fields (File_Handle, Model_Maps.Element (C).Fields);
         FMD.Start_Pack_Section (File_Handle);
         for Tmpl of Model_Maps.Element (C).Templates loop
            FMD.Put_Pack (File_Handle, Tmpl.Question, Tmpl.Answer);
         end loop;
         FMD.End_Pack_Section (File_Handle);
         FMD.Start_Entry_Section (File_Handle);
         for N of Notes loop
            if N.Model = Model_Maps.Key (C) then
               Current_Entry := N.Fields;
               Extract_Images (Current_Entry, Outdir, Media);
               FMD.Put_Entry (File_Handle, Current_Entry, Entry_Size);
            end if;
         end loop;
         FMD.End_Entry_Section (File_Handle);
         FMD.Put_Footer (File_Handle);
         TIO.Close (File_Handle);
         Counter := Counter + 1;
      end loop;
   end Write_FMD;

end Deck_IO;
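-- A minimal usage sketch, assuming the types and subprograms above are
-- visible to clients (via this package's spec or Datatypes); the file name
-- "deck.apkg" and basename "deck" are illustrative only:
--
--    declare
--       Deck   : Deck_Handle;
--       Media  : Media_Collection;
--       Models : Model_Map;
--       Notes  : Note_Vector;
--    begin
--       Read_Media_Collection ("deck.apkg", Media);
--       Open_Database ("deck.apkg", Deck);
--       Query_Models (Deck, Models);
--       Query_Notes (Deck, Notes);
--       if Matches (Models, Notes) then
--          Write_FMD (".", "deck", Models, Notes, Media, Overwrite => True);
--       end if;
--       Close_Database (Deck);
--    end;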