Commits

Kota UENISHI committed 4c35c43

simple and slow query

  • Parent commits 8b655c3

Files changed (5)

File sql/json_lexer.mll

   | '[' { LBRACE2 }
   | ']' { RBRACE2 }
   | ':' { COLON }
-  | ','     { COMMA }
+  | ',' { COMMA }
   | "true"  { TRUE  }
   | "false" { FALSE }
   | "null"  { NULL  }
-  | "\""      {
+  | "\""    {
     in_quote := true;
     quote_ptr := 0;
     quote lexbuf;
     in_quote := false;
     QUOTE (String.sub quote_buf 0 (!quote_ptr))
   }
-  | "\n"       { print "f"; token lexbuf }
-  | digit as s { print ">>"; print s; INT(int_of_string s) }
-(*  | eof	         { print "eof."; EOF } *)
-  | _            { token lexbuf }
+
+  | "\n"       { token lexbuf }
+  | digit as s { INT(int_of_string s) }
+  | eof	       { EOF }
+  | _          { token lexbuf }
 and quote = parse
   | "\""  { () }
   | _ as c {

File sql/json_parser.mly

 
 %{
 
-  let _ = Parsing.set_trace true;;
+  let _ = Parsing.set_trace false;;
   let debugprint = ref true;;
   
   let print str = if !debugprint then print_endline str;;
 
 %token TRUE FALSE NULL
 %token LBRACE RBRACE COLON COMMA
-%token LBRACE2 RBRACE2
+%token LBRACE2 RBRACE2 EOF
 %token <int> INT
 
 %token <string> QUOTE
 
 %start input
-%type <Json.json_value> input
+%type <Json.json_value list> input
 
 %%
 
-input: | exp { $1 };
+input:
+  | exp { $1 }
+;
 
 exp:
-  | value { $1 }
+  | value EOF { [$1] }
+  | value exp { $1::$2 }
 ;
 value:
   | QUOTE { Json.String($1) }
 
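With the grammar now returning a list of values terminated by EOF, the generated entry point parses every top-level JSON value in the input. A minimal sketch (not part of this commit) of driving the generated pair, assuming the usual ocamlyacc/ocamllex module naming (Json_parser from json_parser.mly, Json_lexer from json_lexer.mll) and the input/token entry points declared above:

  (* Sketch only: parse_json_string is a hypothetical helper, not code
     from this commit. *)
  let parse_json_string (s : string) : Json.json_value list =
    let lexbuf = Lexing.from_string s in
    Json_parser.input Json_lexer.token lexbuf

Because exp recurses on "value exp", a file holding several concatenated JSON objects parses into one list, which is why run_query below is handed the parsed list directly.
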
   print_endline sample_json;
   let j = parse_json sample_json in
-  print_endline (Json.pp_json j);
+  List.iter print_endline (List.map Json.pp_json j);
 
-  print_endline " => hello";;
+  print_endline " => test ok";;
 
 let rec eval_pairs cond = function
   | [] -> false;
 
 let rec do_loop () =
   print_string "> ";
-  let line = read_line() in
-  let q = parse_sql line in
-  Sql.pp_query q;
-  let table_name = Sql.get_table_name q in
-  let json = parse_json_file (table_name ^ ".json") in
-  let ans = run_query q [json] in
-  List.iter print_endline (List.map Json.pp_json ans);
-  do_loop();;
+  try
+    let line = read_line() in
+    let q = parse_sql line in
+    Sql.pp_query q;
+    let table_name = Sql.get_table_name q in
+    Printf.printf "loading json file... ";
+    let json = parse_json_file (table_name ^ ".json") in
+    print_endline "done.";
+    let ans = run_query q json in
+    List.iter print_endline (List.map Json.pp_json ans);
+    do_loop()
+
+  with
+    |End_of_file ->
+      print_endline "";
+    |exn ->
+      print_endline (Printexc.to_string exn);
+      do_loop();;
+
 
 let _ =
   (* test();; *)

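The reworked do_loop depends on parse_json_file returning the full list of top-level values and on read_line raising End_of_file at the end of input. parse_json_file itself is not shown in this diff; a hedged sketch of what it could look like on top of the same generated parser (the channel handling here is an assumption, not the commit's code):

  (* Hedged sketch only: the real parse_json_file is not part of this diff. *)
  let parse_json_file (path : string) : Json.json_value list =
    let ic = open_in path in
    let lexbuf = Lexing.from_channel ic in
    let values = Json_parser.input Json_lexer.token lexbuf in
    close_in ic;
    values
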
File sql/sql_lexer.mll

     in_quote := false;
     QUOTE (String.sub quote_buf 0 (!quote_ptr))
   }
-  | digit as s { print "<<<"; print s; INT(int_of_string s) }
-  | literal as s { print s; LITERAL(s) }
+  | digit as s   { INT(int_of_string s) }
+  | literal as s { LITERAL(s) }
 (*  | "\n"         { print "f"; token lexbuf } *)
-  | _ as c       { print_char c; print "boo";  token lexbuf }
+  | _  { token lexbuf }
+
 and quote = parse
   | "'"  { () }
   | _ as c {

File sql/sql_parser.mly

 
 %{
 
-  let _ = Parsing.set_trace true;;
+  let _ = Parsing.set_trace false;;
   let debugprint = ref true;;
   
   let print str = if !debugprint then print_endline str;;
 input: | exp { $1 };
 
 exp:
-  | SELECT ASTERISK FROM LITERAL SEMICOLON
-      { print "here<";  print $4; Sql.Select(Sql.All_col, $4, []) }
-  | SELECT columns  FROM LITERAL SEMICOLON
-      { print "here<<"; print $4; Sql.Select(Sql.Columns($2), $4, []) }
+  | SELECT ASTERISK FROM LITERAL SEMICOLON { Sql.Select(Sql.All_col, $4, []) }
+  | SELECT columns  FROM LITERAL SEMICOLON { Sql.Select(Sql.Columns($2), $4, []) }
 
   | SELECT ASTERISK FROM LITERAL where_clause SEMICOLON
-      { print "hehe"; print $4; Sql.Select(Sql.All_col, $4, $5) }
+      { Sql.Select(Sql.All_col, $4, $5) }
   | SELECT columns  FROM LITERAL where_clause SEMICOLON
-      { print "hoho"; print $4; Sql.Select(Sql.Columns($2), $4, $5) }
+      { Sql.Select(Sql.Columns($2), $4, $5) }
 ;
 
 columns: