Commits

Jason Moiron committed 3918a64

lots of organizational improvements, add a ZipDir which will be the final aspect of creating chapters

Comments (0)

Files changed (8)

-*.swp
+*.sw[op]
+// abstracts access to the config database, which is also used to store series
+// and chapter information so that not all queries are live against all sites
+
 package main
 
import (
	"database/sql"
	"fmt"
	_ "github.com/mattn/go-sqlite3"
	"os"
	"path/filepath"
	"runtime"
)
 
 type Xdg struct {
 	DownloadPath string
 	SiteOrder    []string
 	path         string
+	db           *sql.DB
 }
 
 type Series struct {
 	Updated int64
 }
 
-//"create table chapters (series text, site text, url text primary key, number text)",
-
 type Chapter struct {
 	Series  string
 	Site    string
 
 var DEFAULT_DOWNLOAD_PATH = expandpath("~/Downloads")
 
-// return whether a path exists
-func exists(path string) bool {
-	_, err := os.Stat(path)
-	if err == nil {
-		return true
-	}
-	if os.IsNotExist(err) {
-		return false
-	}
-	fmt.Printf("%s\n", err)
-	return false
-}
-
-// return a path with posix tilde and ENV expansion
-func expandpath(path string) string {
-	if path[0] == '~' {
-		sep := strings.Index(path, string(os.PathSeparator))
-		if sep < 0 {
-			sep = len(path)
-		}
-		var err error
-		var u *user.User
-		username := path[1:sep]
-		if len(username) == 0 {
-			u, err = user.Current()
-		} else {
-			u, err = user.Lookup(username)
-		}
-		if err == nil {
-			path = filepath.Join(u.HomeDir, path[sep:])
-		}
-	}
-	path = os.ExpandEnv(path)
-	abs, err := filepath.Abs(path)
-	if err != nil {
-		return path
-	}
-	return abs
-}
-
 func getenv(key, default_ string) string {
 	value := os.Getenv(key)
 	if len(value) == 0 {
 	if err != nil {
 		panic(fmt.Sprintf("%q", err))
 	}
+	c.db = db
 	return db
 }
 
-func (c *Config) getval(key string) (string, error) {
+func (c *Config) GetVal(key string) (string, error) {
 	var value string
-	db := c.Open()
-	defer db.Close()
-	row := db.QueryRow("select value from config where key = ?", key)
+	row := c.db.QueryRow("select value from config where key = ?", key)
 	err := row.Scan(&value)
 	return value, err
 }
 
+func (c *Config) SetVal(key, val string) {
+	Exec(c.db, "update config set val=? where key=?", val, key)
+}
+
 func (c *Config) init() {
 	var err error
 	if !xdg.Initialized {
 		c.initDb()
 	}
 
-	c.DownloadPath, err = c.getval("DownloadPath")
+	c.Open()
+
+	c.DownloadPath, err = c.GetVal("DownloadPath")
 	if err != nil {
 		fmt.Errorf("Could not read key 'DownloadPath' from config: %q\n", err)
 	}
 }
 
+// Abstract interface covering Exec & Query so that transactions and the db can
+// be used by the same functions... 
+type Executor interface {
+	Exec(query string, args ...interface{}) (sql.Result, error)
+	Query(query string, args ...interface{}) (*sql.Rows, error)
+}
+
// Begin opens a new transaction on db.  It panics when the transaction
// cannot be started, so callers never see an error value.
func Begin(db *sql.DB) *sql.Tx {
	tx, err := db.Begin()
	if err == nil {
		return tx
	}
	panic(err)
}
+
+// helper function which runs Exec on an Executor and panics on error
+func Exec(e Executor, query string, args ...interface{}) sql.Result {
+	ret, err := e.Exec(query, args...)
+	if err != nil {
+		panic(err)
+	}
+	return ret
+}
+
+// helper function which runs Query on an Executor and panics on error
+func Query(e Executor, query string, args ...interface{}) *sql.Rows {
+	ret, err := e.Query(query, args...)
+	if err != nil {
+		panic(err)
+	}
+	return ret
+}
+
 func (c *Config) initDb() {
 	tables := []string{
 		"create table config (key text primary key, value text)",
 		"create table series (name text, key text, url text primary key, site text, updated integer default 0)",
 		"create table chapters (name text, number text, url text primary key, series text, site text)",
 	}
-	db := c.Open()
-	defer db.Close()
 	// start a transaction;  sqlite is slow as hell without them
-	tx, err := db.Begin()
-	if err != nil {
-		fmt.Printf("Unable to open transaction on config db: %s\n", err)
-		return
-	}
+	tx := Begin(c.db)
+	defer tx.Commit()
 
 	// create tables
 	for _, t := range tables {
-		_, err := tx.Exec(t)
-		if err != nil {
-			panic(fmt.Sprintf("table panic: %q: %s\n", err, t))
-		}
+		Exec(tx, t)
 	}
 
-	_, err = tx.Exec("insert into config (key, value) values (?, ?)",
-		"DownloadPath", DEFAULT_DOWNLOAD_PATH)
-	if err != nil {
-		panic(fmt.Sprintf("panic: %q\n", err))
-	}
+	Exec(tx, "insert into config (key, value) values (?, ?)", "DownloadPath", DEFAULT_DOWNLOAD_PATH)
 
 	addSite := "insert into sites (name, url, priority) values (?, ?, ?)"
 	tx.Exec(addSite, "manga-access", "http://www.manga-access.com", 1)
 	tx.Exec(addSite, "mangareader", "http://www.mangareader.net", 3)
 	tx.Exec(addSite, "mangafox", "http://www.mangafox.me", 4)
 
-	tx.Commit()
 }
 
-func (c *Config) SetDownloadPath(path string) error {
-	db := c.Open()
-	defer db.Close()
-
-	_, err := db.Exec("update config set value=? where key=?", path, "DownloadPath")
-	return err
+func (c *Config) SetDownloadPath(path string) {
+	c.SetVal("DownloadPath", path)
 }
 
 func (c *Config) AddSite(name, url string, priority int) error {
-	db := c.Open()
-	defer db.Close()
-
-	_, err := db.Exec("insert into sites (name, url, priority) values (?, ?, ?)",
+	_, err := c.db.Exec("insert into sites (name, url, priority) values (?, ?, ?)",
 		name, url, priority)
 	return err
 }
 
 func (c *Config) RemoveSite(name string) error {
-	db := c.Open()
-	defer db.Close()
 	if len(name) == 0 {
 		return fmt.Errorf("Error: name of site to delete must be provided.")
 	}
-	_, err := db.Exec("DELETE FROM sites WHERE name=?", name)
+	_, err := c.db.Exec("DELETE FROM sites WHERE name=?", name)
 	return err
 }
 
 func (c *Config) SetSitePriority(name string, priority int) {
-
+	Exec(c.db, "update sites set priority=? where name=?", priority, name)
 }
 
 // Convenient interface for fetching a list of series objects from the db
+// small library to handle files and paths
+
+package main
+
+import (
+	"archive/zip"
+	"bytes"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/user"
+	"path/filepath"
+	"strings"
+)
+
+// return whether a path exists
+func exists(path string) bool {
+	_, err := os.Stat(path)
+	if err == nil {
+		return true
+	}
+	if os.IsNotExist(err) {
+		return false
+	}
+	fmt.Printf("%s\n", err)
+	return false
+}
+
// expandpath returns path with a leading posix tilde (~ or ~user) and
// environment variables expanded, made absolute when possible.  An
// empty path is returned unchanged (the original indexed path[0] and
// panicked on "").
func expandpath(path string) string {
	if len(path) == 0 {
		return path
	}
	if path[0] == '~' {
		sep := strings.Index(path, string(os.PathSeparator))
		if sep < 0 {
			sep = len(path)
		}
		var err error
		var u *user.User
		username := path[1:sep]
		if len(username) == 0 {
			u, err = user.Current()
		} else {
			u, err = user.Lookup(username)
		}
		// on lookup failure the tilde is deliberately left untouched
		if err == nil {
			path = filepath.Join(u.HomeDir, path[sep:])
		}
	}
	path = os.ExpandEnv(path)
	abs, err := filepath.Abs(path)
	if err != nil {
		return path
	}
	return abs
}
+
+// zip a directory (path) into destination path (dest).  because requirements
+// for ukiyo are simple, it is non-recursive
+func ZipDir(path, dest string) error {
+	files, err := ioutil.ReadDir(path)
+	if err != nil {
+		return err
+	}
+	if len(files) == 0 {
+		return fmt.Errorf("Nothing to zip.")
+	}
+
+	buf := new(bytes.Buffer)
+	writer := zip.NewWriter(buf)
+
+	for _, file := range files {
+		if file.IsDir() {
+			continue
+		}
+		realpath := filepath.Join(path, file.Name())
+		zippath := filepath.Join(filepath.Dir(path), file.Name())
+		f, err := writer.Create(zippath)
+		if err != nil {
+			return err
+		}
+		body, err := ioutil.ReadFile(realpath)
+		if err != nil {
+			return err
+		}
+		_, err = f.Write(body)
+		if err != nil {
+			return err
+		}
+	}
+	err = writer.Close()
+	if err != nil {
+		return err
+	}
+	err = ioutil.WriteFile(dest, buf.Bytes(), 0644)
+	if err != nil {
+		return err
+	}
+	return nil
+}
+// convenient http wrappers for ukiyo
+package main
+
+import (
+	"fmt"
+	"io/ioutil"
+	"net/http"
+	"net/url"
+	"strings"
+)
+
+var httpClient = &http.Client{
+	// keep user-agent:
+	// https://groups.google.com/forum/?fromgroups#!topic/golang-nuts/OwGvopYXpwE%5B1-25%5D
+	CheckRedirect: func(req *http.Request, via []*http.Request) error {
+		old := via[0]
+		req.Header.Set("User-Agent", old.UserAgent())
+		return nil
+	},
+}
+
+// http get which can use an optional cache file to return values from
+func HttpGet(url string, cache ...string) ([]byte, error) {
+	cachefile := ""
+	if len(cache) != 0 {
+		cachefile = cache[0]
+	}
+
+	var body []byte
+
+	if len(cachefile) > 0 && exists(cachefile) {
+		if opts.Verbose {
+			fmt.Printf("Reading in cached body for %s (in %s)\n", url, cachefile)
+		}
+		body, _ = ioutil.ReadFile(cachefile)
+	} else {
+		req, err := http.NewRequest("GET", url, nil)
+		if err != nil {
+			return body, err
+		}
+
+		resp, err := httpClient.Do(req)
+		if err != nil {
+			return body, err
+		}
+		defer resp.Body.Close()
+		body, err = ioutil.ReadAll(resp.Body)
+
+		if err != nil {
+			return body, err
+		}
+
+		if len(cachefile) > 0 {
+			// this is for debugging only (to prevent hammering sites)
+			// disable with some kind of flag or remove from code before shipping
+			err = ioutil.WriteFile(cachefile, body, 0655)
+			if err != nil {
+				fmt.Printf("Error writing out cachefile %s\n", cachefile)
+			}
+		}
+	}
+
+	return body, nil
+}
+
+// Get a document object, pre-parsed, which must be freed by the caller
+func HttpGetDocument(url string, cache ...string) (*Document, error) {
+	document := new(Document)
+	body, err := HttpGet(url, cache...)
+	if err != nil {
+		return document, err
+	}
+	err = document.FromBytes(body)
+	if err != nil {
+		return document, err
+	}
+	return document, nil
+}
+
+// Download a url to a path
+func HttpDownloadTo(url, path string)
+
// BaseUrl returns the scheme://host portion of a url string, or the
// empty string when the url cannot be parsed (the original ignored the
// parse error and would dereference a nil *url.URL).
func BaseUrl(u string) string {
	parsed, err := url.Parse(u)
	if err != nil {
		return ""
	}
	return fmt.Sprintf("%s://%s", parsed.Scheme, parsed.Host)
}
+
// UrlJoin glues url fragments together with single "/" separators: the
// first fragment keeps its leading slashes, later fragments keep their
// trailing ones.
func UrlJoin(strs ...string) string {
	trimmed := make([]string, 0, len(strs))
	for i, part := range strs {
		if i == 0 {
			part = strings.TrimRight(part, "/")
		} else {
			part = strings.TrimLeft(part, "/")
		}
		trimmed = append(trimmed, part)
	}
	return strings.Join(trimmed, "/")
}
+// This file contains parsing functionality, both as a wrapper for
+// underlying html parsing backends (for a more specialized, convenient
+// interface) and for various parsing duties done by the rippers
+
 package main
 
 import (
 	"github.com/moovweb/gokogiri/html"
 	"github.com/moovweb/gokogiri/xml"
 	"github.com/moovweb/gokogiri/xpath"
+	"regexp"
 	"time"
 	"unsafe"
 )
 
var numRegex = regexp.MustCompile(`(\d+(?:\.\d+)?)`)

// FindNumber returns the first integer or decimal number found in str,
// or the empty string when there is none.
func FindNumber(str string) string {
	// the entire pattern is the capture group, so the full match is
	// identical to group 1 (and "" on no match)
	return numRegex.FindString(str)
}
+
+// given a unix timestamp, return a formatted date as a string
+func toDate(timestamp int64) string {
+	if timestamp == 0 {
+		return "never"
+	}
+	t := time.Unix(timestamp, 0)
+	return t.Format(time.UnixDate)
+}
+
 func tick() { fmt.Printf("%s\n", time.Now().String()) }
 
 // Selectable implements a simple interface which allows to get the inner text
+// This file contains functions and utilities which aid in searching and
+// filtering cached Chapter and Series data.
+
 package main
 
 import (
 	}
 	series := make([]Series, 0)
 	term := fmt.Sprintf("%%%s%%", strings.Join(terms, "%"))
-	db := config.Open()
-	defer db.Close()
 
-	rows, err := db.Query(q, term, term)
+	rows, err := config.db.Query(q, term, term)
 	if err != nil {
 		return series, err
 	}
 	return series, err
 }
 
-var numRegex = regexp.MustCompile("(\\d+(?:\\.\\d+)?)")
+// Like FindSeries, this attempts to find a chapters with a given series name
+func FindChapters(group bool, series string) ([]*Chapter, error) {
+	chapters := make([]*Chapter, 0)
+	q := `select name, number, group_concat(url), series, group_concat(site)
+		from chapters where series=?`
+	if group {
+		q += " group by number"
+	}
+	q += " order by round(number, 2)"
 
-// Find a number in a string
-func FindNumber(str string) string {
-	groups := numRegex.FindStringSubmatch(str)
-	if groups == nil {
-		return ""
+	rows, err := config.db.Query(q, series)
+	if err != nil {
+		return chapters, err
+	}
+
+	for rows.Next() {
+		c := new(Chapter)
+		err = rows.Scan(&c.Name, &c.Number, &c.Url, &c.Series, &c.Site)
+		if err != nil {
+			fmt.Printf("Error with row: %s\n", err)
+		}
+		c.Numberf, _ = strconv.ParseFloat(c.Number, 64)
+		chapters = append(chapters, c)
 	}
-	return groups[1]
+
+	return chapters, nil
 }
 
 // A rule, which filters are made up of
 func CompileFilter(str string) *Filter {
 	f := new(Filter)
 	f.orig = str
-	rulestrs := strings.Split(str, ",")
-	f.compiled = make([]*Rule, len(rulestrs))
-	for i, r := range rulestrs {
-		f.compiled[i] = CompileRule(r)
+	if len(str) > 0 {
+		rulestrs := strings.Split(str, ",")
+		f.compiled = make([]*Rule, len(rulestrs))
+		for i, r := range rulestrs {
+			f.compiled[i] = CompileRule(r)
+		}
 	}
 	return f
 }
 }
 
 func (f *Filter) ApplyString(arg string) bool {
+	if len(f.compiled) == 0 {
+		return true
+	}
 	for _, r := range f.compiled {
 		if r.ApplyString(arg) {
 			return true
 }
 
 func (f *Filter) ApplyFloat(arg float64) bool {
+	if len(f.compiled) == 0 {
+		return true
+	}
 	for _, r := range f.compiled {
 		if r.ApplyFloat(arg) {
 			return true
 const VERSION = "0.1b"
 
 type Options struct {
-	Update          bool
 	Help            bool
 	Version         bool
 	Verbose         bool
-	List            bool
-	SetDownloadPath bool
-	Sync            bool
-	Show            bool
-	// sites
+	Update          bool
+	SetDownloadPath string
+	Download        bool
 	ListSites       bool
 	AddSite         string
 	RemoveSite      string
 	SetSitePriority string
+	List            bool
+	Add             bool
+	Remove          bool
+	Sync            bool
 	Search          bool
-
-	Filter *Filter
+	Show            bool
+	Filter          *Filter
 }
 
 var opts Options
 }
 
 func main() {
+	defer config.db.Close()
+
 	if opts.Help {
 		optarg.Usage()
 		return
 		return
 	}
 	if opts.Verbose {
-		fmt.Printf("Verbosity on.\n")
+		vPrintf("Verbosity on.\n")
+	}
+
+	if len(opts.SetSitePriority) > 0 {
+		if len(optarg.Remainder) != 1 {
+			fmt.Printf("Error: --set-site-priority requires a name and a priority.\n")
+			return
+		}
+		priority, err := strconv.ParseInt(optarg.Remainder[0], 10, 32)
+		if err != nil {
+			fmt.Printf("Error: priority must be a valid integer.\n")
+			return
+		}
+		SetSitePriority(opts.SetSitePriority, int(priority))
 	}
 
 	if opts.ListSites {
 				fmt.Printf("Error with priority argument: %s\n", err)
 			}
 		} else {
-			db := config.Open()
-			defer db.Close()
-			row := db.QueryRow("select max(priority) from sites")
+			row := config.db.QueryRow("select max(priority) from sites")
 			err = row.Scan(&priority)
 			if err != nil {
 				priority = 1
 	UpdateSites(false)
 }
 
+func SetSitePriority(name string, priority int) {
+	config.SetSitePriority(name, priority)
+}
+
 func ListSites() {
-	db := config.Open()
-	defer db.Close()
+	var err error
+	rows := Query(config.db, "SELECT name, url, priority FROM sites ORDER BY priority")
 
-	rows, err := db.Query("SELECT name, url, priority FROM sites ORDER BY priority")
-	if err != nil {
-		fmt.Printf("Error fetching sites: %s\n", err)
-	}
 	for rows.Next() {
 		site := new(Site)
 		err = rows.Scan(&site.Name, &site.Url, &site.Priority)
 
 	chapters := UpdateChapters(series[0].Name)
 	for _, c := range chapters {
-		fmt.Printf(" * %s %s (%s)\n", c.Series, c.Number, c.Site)
+		if opts.Filter.Match(c.Number) {
+			fmt.Printf(" * %s %s (%s)\n", c.Series, c.Number, c.Site)
+		}
 	}
 }
 
 	optarg.Add("u", "update", "Update all site & series info.", false)
 	optarg.Add("", "sync", "Sync series info with what is on disk.", false)
 	optarg.Add("d", "download", "Download new chapters from series.", false)
+	optarg.Add("", "set-download-path", "Change destination for sync and downloads.", "")
 
 	optarg.Header("Sites")
 	optarg.Add("", "sites", "List sites.", false)
 	optarg.Add("s", "show", "Show chapters from a series.", false)
 	optarg.Add("f", "filter", "Filter chapters to show or download.", "")
 
+	opts.Filter = CompileFilter("")
+
 	for opt := range optarg.Parse() {
 		switch opt.Name {
 		case "help":
 			opts.Help = opt.Bool()
-		case "update":
-			opts.Update = opt.Bool()
 		case "version":
 			opts.Version = opt.Bool()
 		case "verbose":
 			opts.Verbose = opt.Bool()
+		// downloading
+		case "update":
+			opts.Update = opt.Bool()
+		case "sync":
+			opts.Sync = opt.Bool()
+		case "download":
+			opts.Download = opt.Bool()
+		case "set-download-path":
+			opts.SetDownloadPath = opt.String()
 		// sites
 		case "sites":
 			opts.ListSites = opt.Bool()
 			opts.AddSite = opt.String()
 		case "rm-site":
 			opts.RemoveSite = opt.String()
+		case "set-site-priority":
+			opts.SetSitePriority = opt.String()
+		// series
+		case "list":
+		case "add-series":
+		case "rm-series":
 		case "find", "search":
 			opts.Search = opt.Bool()
+		// chapters
 		case "show":
 			opts.Show = opt.Bool()
 		case "filter":
 
 import (
 	"fmt"
-	"io/ioutil"
-	"net/http"
-	"net/url"
-	"strconv"
 	"strings"
 	"time"
 )
 var CHAPTER_UPDATE_FREQUENCY = int64(86400)
 var MAX_CONCURRENT_WORKERS = 3
 
-var client = &http.Client{
-	// keep user-agent:
-	// https://groups.google.com/forum/?fromgroups#!topic/golang-nuts/OwGvopYXpwE%5B1-25%5D
-	CheckRedirect: func(req *http.Request, via []*http.Request) error {
-		old := via[0]
-		req.Header.Set("User-Agent", old.UserAgent())
-		return nil
-	},
-}
-
-func toDate(timestamp int64) string {
-	if timestamp == 0 {
-		return "never"
-	}
-	t := time.Unix(timestamp, 0)
-	return t.Format(time.UnixDate)
-}
-
 func dictcat(a, b []map[string]string) []map[string]string {
 	r := make([]map[string]string, len(a)+len(b))
 	copy(r, a)
 	}
 
 	q := "select name, url, priority, updated from sites WHERE updated < ? ORDER BY priority"
-	db := config.Open()
-	rows, err := db.Query(q, after)
+	rows, err := config.db.Query(q, after)
 	if err != nil {
 		panic(err)
 	}
 	}
 
 	if !Force {
-		fmt.Printf("Updating %d sites last updated over 1 week ago:\n", len(sites))
+		vPrintf("Updating %d sites last updated over 1 week ago:\n", len(sites))
 	} else {
-		fmt.Printf("Force-updating %d sites:\n", len(sites))
+		vPrintf("Force-updating %d sites:\n", len(sites))
 	}
 
 	sem := make(chan bool, MAX_CONCURRENT_WORKERS)
 		sem <- true
 	}
 
-	tx, err := db.Begin()
-	if err != nil {
-		panic(err)
-	}
+	tx := Begin(config.db)
+
+	vPrintf("Received %d total results\n", len(results))
 
-	fmt.Printf("Received %d total results\n", len(results))
 	q = `insert or replace into series 
 		(name, key, url, site, updated) values 
 		(?, ?, ?, ?, coalesce((select updated from series where url = ?), 0))`
 	fmt.Printf("}\n")
 }
 
-func UrlJoin(strs ...string) string {
-	ss := make([]string, len(strs))
-	for i, s := range strs {
-		if i == 0 {
-			ss[i] = strings.TrimRight(s, "/")
-		} else {
-			ss[i] = strings.TrimLeft(s, "/")
-		}
-	}
-	return strings.Join(ss, "/")
-}
-
 var UpdateUrls = map[string]string{
 	"manga-access": "/manga/list",
 	"mangahere":    "/mangalist/",
 	"mangafox":     UpdateMangafox,
 }
 
-// http get which can use an optional cache file to return values from
-func HttpGet(url string, cache ...string) ([]byte, error) {
-	cachefile := ""
-	if len(cache) != 0 {
-		cachefile = cache[0]
-	}
-
-	var body []byte
-
-	if exists(cachefile) {
-		if opts.Verbose {
-			fmt.Printf("Reading in cached body for %s (in %s)\n", url, cachefile)
-		}
-		body, _ = ioutil.ReadFile(cachefile)
-	} else {
-		req, err := http.NewRequest("GET", url, nil)
-		if err != nil {
-			return body, err
-		}
-
-		resp, err := client.Do(req)
-		if err != nil {
-			return body, err
-		}
-		defer resp.Body.Close()
-		body, err = ioutil.ReadAll(resp.Body)
-
-		if err != nil {
-			return body, err
-		}
-
-		if len(cachefile) != 0 {
-			// this is for debugging only (to prevent hammering sites)
-			// disable with some kind of flag or remove from code before shipping
-			err = ioutil.WriteFile(cachefile, body, 0655)
-			if err != nil {
-				fmt.Printf("Error writing out cachefile %s\n", cachefile)
-			}
-		}
-	}
-
-	return body, nil
-}
-
-// Get a document object, pre-parsed, which must be freed by the caller
-func HttpGetDocument(url string, cache ...string) (*Document, error) {
-	document := new(Document)
-	body, err := HttpGet(url, cache...)
-	if err != nil {
-		return document, err
-	}
-	err = document.FromBytes(body)
-	if err != nil {
-		return document, err
-	}
-	return document, nil
-}
-
 func UpdateSite(site Site) []map[string]string {
 	path, ok := UpdateUrls[site.Name]
 	updater := SiteUpdateFunctions[site.Name]
 		return none
 	}
 	url := UrlJoin(site.Url, path)
-	fmt.Printf("Updating %s via %s\n", site.Name, url)
+	vPrintf("Updating %s via %s\n", site.Name, url)
 
 	cachefile := "./cache/" + site.Name + ".html"
 	document, err := HttpGetDocument(url, cachefile)
 	if len(force) != 0 {
 		Force = force[0]
 	}
-	db := config.Open()
-	defer db.Close()
 
 	now := time.Now().Unix()
 	after := now
 		after -= CHAPTER_UPDATE_FREQUENCY
 	}
 
-	series := QuerySeries(db, "select * from series where name = ? AND updated < ?", name, after)
+	series := QuerySeries(config.db, "select * from series where name = ? AND updated < ?", name, after)
 
 	if len(series) > 0 {
 		if !Force {
-			fmt.Printf("Updating %d sites for %s last updated over 1 week ago:\n", len(series), name)
+			vPrintf("Updating %d sites for %s last updated over 1 week ago:\n", len(series), name)
 		} else {
-			fmt.Printf("Force-updating %d sites for %s:\n", len(series), name)
+			vPrintf("Force-updating %d sites for %s:\n", len(series), name)
 		}
 		for _, s := range series {
-			fmt.Printf("Updating %s (@%s, %s)\n", s.Name, s.Site, s.Url)
+			vPrintf("Updating %s (@%s, %s)\n", s.Name, s.Site, s.Url)
 		}
 
 		sem := make(chan bool, MAX_CONCURRENT_WORKERS)
 			sem <- true
 		}
 
-		tx, err := db.Begin()
-		if err != nil {
-			panic(err)
-		}
+		tx := Begin(config.db)
+
+		vPrintf("Received %d total results\n", len(results))
 
-		fmt.Printf("Received %d total results\n", len(results))
 		for _, s := range series {
-			tx.Exec("delete from chapters where name=? and site=?", name, s.Site)
+			Exec(tx, "delete from chapters where name=? and site=?", name, s.Site)
 		}
 		q := `insert into chapters 
 		(name, number, url, series, site) values 
 		(?, ?, ?, ?, ?)`
 
 		for _, r := range results {
-			tx.Exec(q, r["name"], r["number"], r["url"], r["series"], r["site"])
+			Exec(tx, q, r["name"], r["number"], r["url"], r["series"], r["site"])
 		}
 		for _, s := range series {
-			tx.Exec("update series set updated = ? where name = ? and site= ?", now, name, s.Site)
+			Exec(tx, "update series set updated = ? where name = ? and site= ?", now, name, s.Site)
 		}
 		tx.Commit()
 	}
 
-	chapters := make([]*Chapter, 0)
-	rows, err := db.Query("select name, number, group_concat(url), series, group_concat(site) from chapters where series=? group by number order by round(number, 2)", name)
-	if err != nil {
-		panic(err)
-	}
-
-	for rows.Next() {
-		chap := new(Chapter)
-		err = rows.Scan(&chap.Name, &chap.Number, &chap.Url, &chap.Series, &chap.Site)
-		chap.Numberf, _ = strconv.ParseFloat(chap.Number, 64)
-		if err != nil {
-			fmt.Printf("Error: %s\n", err)
-		}
-		chapters = append(chapters, chap)
-	}
-
+	chapters, _ := FindChapters(true, name)
 	return chapters
 }
 
 		return none
 	}
 	defer document.Free()
-	baseurl, _ := url.Parse(series.Url)
-	fmt.Println(baseurl)
-	base := fmt.Sprintf("%s://%s", baseurl.Scheme, baseurl.Host)
+	base := BaseUrl(series.Url)
 
 	anchors := document.CssSelect("a.download-link")
 	data := make([]map[string]string, len(anchors))
 			data[i]["number"] = numelem[0].Text()
 		}
 	}
-	fmt.Printf("Found %d chapters on manga-access\n", len(data))
+	vPrintf("Found %d chapters on manga-access\n", len(data))
 	return data
 }
 
 		return none
 	}
 	defer document.Free()
-	baseurl, _ := url.Parse(series.Url)
-	fmt.Println(baseurl)
-	base := fmt.Sprintf("%s://%s", baseurl.Scheme, baseurl.Host)
+	base := BaseUrl(series.Url)
 
 	anchors := document.CssSelect(".detail_list li a")
 	data := make([]map[string]string, len(anchors))
 			data[i]["url"] = UrlJoin(base, data[i]["url"])
 		}
 	}
-	fmt.Printf("Found %d chapters on mangahere\n", len(data))
+	vPrintf("Found %d chapters on mangahere\n", len(data))
 	return data
 
 }