Commits

Jason Moiron committed 8befad9

migrate to a more go-like arch, mangaaccess->starkana, use sqlx instead of stupid nonsense

  • Parent commits 70a0b21

Files changed (5)
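
The recurring change in this diff is mechanical: sqlx scans query results directly into tagged structs, so the hand-rolled rows.Next()/rows.Scan() loops disappear. A minimal before/after sketch of the pattern, using names from this commit:

    // before: query, iterate, scan each column by hand
    rows, _ := db.Query("select name, chapter from watchlist order by name")
    for rows.Next() {
        item := new(ListItem)
        rows.Scan(&item.Name, &item.Number)
        items = append(items, item)
    }

    // after: sqlx maps columns onto struct fields in one call
    items := []*ListItem{}
    err := db.Select(&items, "select name, chapter from watchlist order by name")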

 import (
 	"database/sql"
 	"fmt"
+	"github.com/jmoiron/sqlx"
 	_ "github.com/mattn/go-sqlite3"
 	"os"
 	"path/filepath"
 	SiteOrder      []string
 	UseUnderscores bool
 	path           string
-	db             *sql.DB
+	db             *sqlx.DB
 }
 
 type Series struct {
 	x.Initialized = true
 }
 
-func (c *Config) Open() *sql.DB {
+func (c *Config) Open() *sqlx.DB {
 	db, err := sql.Open("sqlite3", c.path)
 	if err != nil {
 		panic(fmt.Sprintf("%q", err))
 	}
-	c.db = db
-	return db
+	c.db = &sqlx.DB{*db}
+	return c.db
 }
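
The &sqlx.DB{*db} literal relies on early sqlx embedding sql.DB by value; later releases embed a pointer and expose constructors instead, so the same Open would look roughly like this (a sketch against the later API, which postdates this commit):

    func (c *Config) Open() *sqlx.DB {
        // sqlx.Open wraps sql.Open and returns the *sqlx.DB directly
        db, err := sqlx.Open("sqlite3", c.path)
        if err != nil {
            panic(fmt.Sprintf("%q", err))
        }
        c.db = db
        return c.db
    }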
 
 func (c *Config) GetVal(key string) (string, error) {
 }
 
 func (c *Config) SetVal(key, val string) {
-	Exec(c.db, "update config set value=? where key=?", val, key)
+	c.db.Execf("update config set value=? where key=?", val, key)
 }
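
Execf appears to be one of early sqlx's built-in error-handling Exec variants: it executes the statement and deals with the error itself, which is what lets the deleted panic-on-error Exec helper go away. Later sqlx consolidates this style into MustExec.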
 
 func (c *Config) init() {
 	}
 	c.UseUnderscores = useUnderscores == "1"
 
-	rows := Query(c.db, "select name from sites order by priority")
+	rows, err := c.db.Query("select name from sites order by priority")
 	for rows.Next() {
 		var s string
 		rows.Scan(&s)
 
 }
 
-// Abstract interface covering Exec & Query so that transactions and the db can
-// be used by the same functions... 
-type Executor interface {
-	Exec(query string, args ...interface{}) (sql.Result, error)
-	Query(query string, args ...interface{}) (*sql.Rows, error)
-}
-
-// helper function which creates a new transaction or panics on error
-func Begin(db *sql.DB) *sql.Tx {
-	tx, err := db.Begin()
-	if err != nil {
-		panic(err)
-	}
-	return tx
-}
-
-// helper function which runs Exec on an Executor and panics on error
-func Exec(e Executor, query string, args ...interface{}) sql.Result {
-	ret, err := e.Exec(query, args...)
-	if err != nil {
-		panic(err)
-	}
-	return ret
-}
-
-// helper function which runs Query on an Executor and panics on error
-func Query(e Executor, query string, args ...interface{}) *sql.Rows {
-	ret, err := e.Query(query, args...)
-	if err != nil {
-		panic(err)
-	}
-	return ret
-}
-
 func (c *Config) initDb() {
 	tables := []string{
 		"create table config (key text primary key, value text)",
 		"create table log (message text, timestamp integer default 0)",
 	}
 	// start a transaction;  sqlite is slow as hell without them
-	tx := Begin(c.db)
+	tx, _ := c.db.Beginx()
 	defer tx.Commit()
 
 	// create tables
 	for _, t := range tables {
-		Exec(tx, t)
+		tx.Execf(t)
 	}
 
-	Exec(tx, "insert into config (key, value) values (?, ?)", "DownloadPath", DEFAULT_DOWNLOAD_PATH)
-	Exec(tx, "insert into config (key, value) values (?, ?)", "UseUnderscores", "0")
+	tx.Execf("insert into config (key, value) values (?, ?)", "DownloadPath", DEFAULT_DOWNLOAD_PATH)
+	tx.Execf("insert into config (key, value) values (?, ?)", "UseUnderscores", "0")
 
 	addSite := "insert into sites (name, url, priority) values (?, ?, ?)"
-	tx.Exec(addSite, "manga-access", "http://www.manga-access.com", 1)
-	tx.Exec(addSite, "mangahere", "http://www.mangahere.com", 2)
-	tx.Exec(addSite, "mangareader", "http://www.mangareader.net", 3)
-	tx.Exec(addSite, "mangafox", "http://www.mangafox.me", 4)
+	tx.Execf(addSite, "starkana", "https://starkana.me", 1)
+	tx.Execf(addSite, "mangahere", "http://www.mangahere.com", 2)
+	tx.Execf(addSite, "mangareader", "http://www.mangareader.net", 3)
+	tx.Execf(addSite, "mangafox", "http://www.mangafox.me", 4)
 
 }
 
 }
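
One behavioral change worth noting: tx, _ := c.db.Beginx() discards the error that the old Begin helper panicked on, so if opening the transaction fails, tx is nil and the first tx.Execf panics with a less useful nil-pointer message.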
 
 func (c *Config) SetSitePriority(name string, priority int) {
-	Exec(c.db, "update sites set priority=? where name=?", priority, name)
+	c.db.Execf("update sites set priority=? where name=?", priority, name)
 }
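
The deleted Executor interface also has a direct counterpart in sqlx itself: sqlx.Execer is satisfied by both *sqlx.DB and *sqlx.Tx, and the package-level sqlx.MustExec panics on failure just as the old helper did. A hedged sketch (these names come from later sqlx releases):

    // runs against either a *sqlx.DB or a *sqlx.Tx
    func setPriority(e sqlx.Execer, name string, priority int) {
        sqlx.MustExec(e, "update sites set priority=? where name=?", priority, name)
    }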
 
 func (c *Config) Log(message string) error {
 
 type ListItem struct {
 	Name    string
-	Number  string
+	Number  string `db:"chapter"`
 	Numberf float64
 }
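
The new db:"chapter" tag is what lets Select map the chapter column onto Number; Name matches by sqlx's default lowercased-field-name rule, and Numberf, which has no corresponding column, is still filled in by hand after the query.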
 
 	if w.loaded {
 		return
 	}
-	w.Items = make([]*ListItem, 0)
-	rows := Query(config.db, "select name, chapter from watchlist order by name;")
-
-	for rows.Next() {
-		item := new(ListItem)
-		rows.Scan(&item.Name, &item.Number)
-		item.Numberf, _ = strconv.ParseFloat(item.Number, 64)
-		w.Items = append(w.Items, item)
+	w.Items = []*ListItem{}
+	err := config.db.Select(&w.Items, "select name, chapter from watchlist order by name;")
+	if err != nil {
+		fmt.Println(err)
+		return
+	}
+	for _, i := range w.Items {
+		i.Numberf, _ = strconv.ParseFloat(i.Number, 64)
 	}
-
 	w.loaded = true
 }
 
 
 func (w *Watchlist) Add(name string) {
 	w.Load()
-	tx := Begin(config.db)
-	Exec(tx, "insert into watchlist (name, chapter) values (?, ?);", name, 0)
+	tx, _ := config.db.Beginx()
+	tx.Execf("insert into watchlist (name, chapter) values (?, ?);", name, 0)
 	tx.Commit()
 }
 
 func (w *Watchlist) Remove(name string) {
 	w.Load()
-	tx := Begin(config.db)
-	Exec(tx, "delete from watchlist where name=?;", name)
+	tx, _ := config.db.Beginx()
+	tx.Execf("delete from watchlist where name=?;", name)
 	tx.Commit()
 }
 
 func (w *Watchlist) Set(name string, number string) error {
 	w.Load()
-	tx := Begin(config.db)
-	rows := Query(tx, "select name, chapter from watchlist where name LIKE ?", name)
-
-	items := make([]*ListItem, 0)
-
-	for rows.Next() {
-		item := new(ListItem)
-		rows.Scan(&item.Name, &item.Number)
-		items = append(items, item)
+	items := []*ListItem{}
+	err := config.db.Select(&items, "select name, chapter from watchlist where name LIKE ?", name)
+	if err != nil {
+		return err
 	}
 
 	if len(items) != 1 {
 		return fmt.Errorf("Expected one match from name \"%s\", but found %d!", name, len(items))
 	}
 
-	Exec(tx, "update watchlist set chapter=? where name=?", number, items[0].Name)
+	tx, _ := config.db.Beginx()
+	tx.Execf("update watchlist set chapter=? where name=?", number, items[0].Name)
 	tx.Commit()
 	return nil
 }
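
For Set's single-row lookup, later sqlx also offers Get, which scans exactly one row and returns sql.ErrNoRows when nothing matches. Unlike the len(items) != 1 check above, though, Get silently takes the first row when several names match, so this sketch trades away the ambiguity error:

    item := ListItem{}
    err := config.db.Get(&item, "select name, chapter from watchlist where name LIKE ?", name)
    if err == sql.ErrNoRows {
        return fmt.Errorf("no watchlist entry matching %q", name)
    } else if err != nil {
        return err
    }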
 import (
 	"bytes"
 	"fmt"
-	"github.com/moovweb/gokogiri"
-	"github.com/moovweb/gokogiri/css"
-	"github.com/moovweb/gokogiri/html"
-	"github.com/moovweb/gokogiri/xml"
-	"github.com/moovweb/gokogiri/xpath"
+	"github.com/jmoiron/gokogiri"
+	"github.com/jmoiron/gokogiri/css"
+	"github.com/jmoiron/gokogiri/html"
+	"github.com/jmoiron/gokogiri/xml"
+	"github.com/jmoiron/gokogiri/xpath"
 	"regexp"
 	"strings"
 	"time"
 	expr := xpath.Compile(xpathexpr)
 	nxp := xpath.NewXPath(n.ptr)
 	defer nxp.Free()
-	nodes := nxp.Evaluate(n.ptr, expr)
+	nodes, _ := nxp.Evaluate(n.ptr, expr)
 	ret := make([]Node, len(nodes))
 	for i, ptr := range nodes {
 		ret[i].fromPtr(ptr, n.doc)
 import (
 	"fmt"
 	"github.com/jmoiron/go-pkg-optarg"
+	"os"
+	"runtime/pprof"
 	"strconv"
 	"strings"
 )
 	Help                 bool
 	Version              bool
 	Verbose              bool
+	Profile              bool
 	Update               bool
 	Force                bool
 	SetDownloadPath      string
 		fmt.Printf("%s\n", VERSION)
 		return
 	}
-	if opts.Verbose {
-		vPrintf("Verbosity on.\n")
+
+	if opts.Profile {
+		f, err := os.Create("ukiyo.pprof")
+		if err != nil {
+			fmt.Println(err)
+			return
+		}
+		vPrintf("Activating Profiling...\n")
+		pprof.StartCPUProfile(f)
+		vPrintf("Profiling activated.\n")
+		defer pprof.StopCPUProfile()
 	}
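
The deferred pprof.StopCPUProfile is what flushes the profile to disk, and it only runs if main returns normally; the resulting ukiyo.pprof can then be inspected with go tool pprof.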
 
 	if len(opts.SetDownloadPath) > 0 {
 }
 
 func ListSites() {
-	var err error
-	rows := Query(config.db, "SELECT name, url, priority FROM sites ORDER BY priority")
-
-	for rows.Next() {
-		site := new(Site)
-		err = rows.Scan(&site.Name, &site.Url, &site.Priority)
-		if err == nil {
-			fmt.Printf(" %d. %s [%s]\n", site.Priority, site.Name, site.Url)
-		} else {
-			fmt.Printf("Error: %s\n", err)
-		}
+	sites := []*Site{}
+	config.db.Select(&sites, "SELECT name, url, priority FROM sites ORDER BY priority")
+	for _, site := range sites {
+		fmt.Printf(" %d. %s [%s]\n", site.Priority, site.Name, site.Url)
 	}
 }
 
 	series, err := FindSeries(true, terms...)
 	if err != nil {
 		fmt.Printf("Error searching for terms: %s\n", err)
+		return
 	}
 
 	for _, s := range series {
 }
 
 func DownloadChapters(terms ...string) {
+
 	UpdateSites()
 	series, err := FindOneSeries(true, terms...)
 	if err != nil {
 	optarg.Add("h", "help", "Show help.", false)
 	optarg.Add("", "version", "Show version and exit.", false)
 	optarg.Add("v", "verbose", "Show more output.", false)
+	optarg.Add("", "profile", "Profile ukiyo.", false)
 
 	optarg.Header("Downloading")
 	optarg.Add("u", "update", "Update all site & series info.", false)
 			opts.Version = opt.Bool()
 		case "verbose":
 			opts.Verbose = opt.Bool()
+		case "profile":
+			opts.Profile = opt.Bool()
 		// downloading
 		case "update":
 			opts.Update = opt.Bool()
 			opts.List = opt.Bool()
 		case "add":
 			opts.Add = opt.Bool()
-		case "rm", "del":
+		case "remove", "del":
 			opts.Remove = opt.Bool()
 		case "set":
 			opts.Set = opt.Bool()
 	"time"
 )
 
-// update once per day unless forced
+// update once per week unless forced
 var SITE_UPDATE_FREQUENCY = int64(86400) * 7
+
+// update once per day unless forced
 var CHAPTER_UPDATE_FREQUENCY = int64(86400)
 var MAX_CONCURRENT_WORKERS = 3
 
 	return r
 }
 
+func UpdateSeries(u Updater) []*Series {
+	url := u.SeriesListPath()
+	vPrintf("Updating via %s\n", url)
+
+	cachefile := u.CachePath()
+	document, err := HttpGetDocument(url, cachefile)
+	if err != nil {
+		fmt.Printf("Error fetching site %s\n", u.Name())
+		return []*Series{}
+	}
+	defer document.Free()
+	return u.UpdateSeriesList(document)
+}
+
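+// Updater is the per-site scraping strategy: it knows the site's series
+// index url, its local cache file, and how to parse series, chapters,
+// page urls, and image urls out of fetched documents.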
+type Updater interface {
+	SeriesListPath() string
+	CachePath() string
+	Name() string
+	UpdateChapters(series *Series) []*Chapter
+	UpdateSeriesList(doc *Document) []*Series
+	GetPageUrls(string, *Document) []string
+	GetImageUrl(*Document) string
+}
+
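+// Updaters maps a site name (as stored in the sites table) to its scraper.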
+var Updaters = map[string]Updater{}
+
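+// init registers an Updater for each known site. It assumes the package-
+// level config and its db handle are already set up when this init runs.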
+func init() {
+	sites := []*Site{}
+	config.db.Select(&sites, "SELECT * FROM sites")
+	siteMap := map[string]*Site{}
+	for _, site := range sites {
+		siteMap[site.Name] = site
+	}
+
+	for k, v := range siteMap {
+		var u Updater
+		switch k {
+		case "starkana":
+			u = &Starkana{v}
+		case "mangahere":
+			u = &Mangahere{v}
+		}
+		if u != nil {
+			Updaters[v.Name] = u
+		}
+	}
+}
+
+var UpdateUrls = map[string]string{
+	"starkana":    "/manga/list",
+	"mangahere":   "/mangalist/",
+	"mangareader": "/alphabetical",
+	"mangafox":    "/manga/",
+}
+
+type Starkana struct{ *Site }
+
+func (s *Starkana) SeriesListPath() string { return UrlJoin(s.Site.Url, "/manga/list/") }
+func (s *Starkana) CachePath() string      { return "./cache/" + s.Site.Name + ".html" }
+func (s *Starkana) Name() string           { return s.Site.Name }
+
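+// GetPageUrls returns urls for every page after the first; the first
+// page's image is read from the chapter document the caller already has.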
+func (s *Starkana) GetPageUrls(url string, doc *Document) []string {
+	options := doc.CssSelect("select#page_switch option")
+	if len(options) < 1 {
+		return []string{}
+	}
+	urls := make([]string, len(options)-1)
+	base := BaseUrl(url)
+
+	for i, o := range options[1:] {
+		urls[i] = UrlJoin(base, o.Attr("value"))
+	}
+	return urls
+}
+
+func (s *Starkana) GetImageUrl(doc *Document) string {
+	img := doc.CssSelect("#pic img")
+	if len(img) == 0 {
+		return ""
+	}
+	return img[0].Attr("src")
+}
+
+func (s *Starkana) UpdateChapters(series *Series) []*Chapter {
+	// always print what we're doing when we do this
+	fmt.Printf("Updating starkana for series %s\n", series.Name)
+
+	cachefile := fmt.Sprintf("./cache/%s-%s.html", series.Name, series.Site)
+	document, err := HttpGetDocument(series.Url, cachefile)
+	if err != nil {
+		fmt.Printf("Error getting %s\n", series.Name)
+		return []*Chapter{}
+	}
+	defer document.Free()
+	base := BaseUrl(series.Url)
+
+	anchors := document.CssSelect("a.download-link")
+	results := make([]*Chapter, len(anchors))
+	for i, anchor := range anchors {
+		c := &Chapter{}
+		c.Site = series.Site
+		c.Series = series.Name
+		c.Name = strings.Trim(anchor.Text(), " \t\r\n")
+		c.Url = strings.Trim(anchor.Attr("href"), " \t\r\n")
+		if !strings.HasPrefix(c.Url, "http") {
+			c.Url = UrlJoin(base, c.Url)
+		}
+		numelem := anchor.CssSelect("strong")
+		if len(numelem) > 0 {
+			c.Number = numelem[0].Text()
+		}
+		results[i] = c
+	}
+	vPrintf("Found %d chapters on starkana\n", len(results))
+	return results
+}
+
+func (s *Starkana) UpdateSeriesList(doc *Document) []*Series {
+	series := doc.CssSelect("#inner_page >div a")
+	results := make([]*Series, len(series))
+	for i, anchor := range series {
+		r := &Series{}
+		r.Site = s.Site.Name
+		r.Name = strings.Trim(anchor.Text(), "\t ")
+		r.Url = UrlJoin(s.Site.Url, anchor.Attr("href"))
+		spl := strings.Split(r.Url, "/")
+		r.Key = spl[len(spl)-1]
+		r.Url = r.Url + "?mature_confirm=1"
+		results[i] = r
+	}
+
+	fmt.Printf("Found %d series for starkana\n", len(results))
+	return results
+}
+
+type Mangahere struct{ *Site }
+
+func (m *Mangahere) SeriesListPath() string { return UrlJoin(m.Site.Url, "/mangalist/") }
+func (m *Mangahere) CachePath() string      { return "./cache/" + m.Site.Name + ".html" }
+func (m *Mangahere) Name() string           { return m.Site.Name }
+
+func (m *Mangahere) UpdateSeriesList(doc *Document) []*Series {
+	series := doc.CssSelect("div.list_manga li a")
+	results := make([]*Series, len(series))
+	fmt.Printf("Found %d series\n", len(series))
+
+	for i, anchor := range series {
+		r := &Series{}
+		r.Site = m.Site.Name
+		r.Name = strings.Trim(anchor.Text(), " \t")
+		r.Url = strings.Trim(anchor.Attr("href"), " \t")
+		if !strings.HasPrefix(r.Url, "http") {
+			r.Url = UrlJoin(m.Site.Url, r.Url)
+		}
+		url := strings.TrimRight(r.Url, "/")
+		spl := strings.Split(url, "/")
+		r.Key = spl[len(spl)-1]
+		results[i] = r
+	}
+	fmt.Printf("Found %d series for mangahere\n", len(results))
+	return results
+}
+
+func (m *Mangahere) UpdateChapters(series *Series) []*Chapter {
+	// always print when we do this
+	fmt.Printf("Updating mangahere for series %s\n", series.Name)
+
+	cachefile := fmt.Sprintf("./cache/%s-%s.html", series.Name, series.Site)
+	document, err := HttpGetDocument(series.Url, cachefile)
+	if err != nil {
+		fmt.Printf("Error getting %s\n", series.Name)
+		return []*Chapter{}
+	}
+	defer document.Free()
+	base := BaseUrl(series.Url)
+
+	anchors := document.CssSelect(".detail_list li a")
+	results := make([]*Chapter, len(anchors))
+	for i, anchor := range anchors {
+		c := &Chapter{}
+		c.Site = series.Site
+		c.Series = series.Name
+		c.Name = strings.Trim(anchor.Text(), " \t\n\r")
+		c.Url = strings.Trim(anchor.Attr("href"), " \t\n\r")
+
+		spl := strings.Split(strings.TrimRight(c.Url, "/"), "/")
+		c.Number = FindNumber(spl[len(spl)-1])
+		if len(c.Number) == 0 {
+			c.Number = FindNumber(c.Name)
+		} else {
+			// remove leading junk
+			c.Number = strings.TrimLeft(c.Number, " \t0")
+		}
+		if !strings.HasPrefix(c.Url, "http") {
+			c.Url = UrlJoin(base, c.Url)
+		}
+		results[i] = c
+	}
+	vPrintf("Found %d chapters on mangahere\n", len(results))
+	return results
+}
+
+func (m *Mangahere) GetPageUrls(url string, doc *Document) []string {
+	options := doc.CssSelect(".readpage_top .go_page option")
+	if len(options) < 1 {
+		return []string{}
+	}
+	urls := make([]string, len(options)-1)
+	base := BaseUrl(url)
+
+	for i, o := range options[1:] {
+		fragment := o.Attr("value")
+		if strings.HasPrefix(fragment, "http") {
+			urls[i] = fragment
+		} else {
+			urls[i] = UrlJoin(base, fragment)
+		}
+	}
+	return urls
+}
+
+func (m *Mangahere) GetImageUrl(doc *Document) string {
+	img := doc.CssSelect("#image")
+	if len(img) == 0 {
+		return ""
+	}
+	return img[0].Attr("src")
+}
+
 func UpdateSites(force ...bool) {
 	Force := DefaultFalse(force...)
 
 	}
 
 	sem := make(chan bool, MAX_CONCURRENT_WORKERS)
-	results := make([]map[string]string, 0)
+	results := []*Series{}
 
 	for _, s := range sites {
 		sem <- true
 		go func(site Site) {
 			defer func() { <-sem }()
-			ret := UpdateSite(site)
-			results = dictcat(results, ret)
+			updater, ok := Updaters[site.Name]
+			if !ok {
+				fmt.Printf("Unknown site-name %s, skipping update.\n", site.Name)
+				return
+			}
+			ret := UpdateSeries(updater)
+			for i := 0; i < len(ret); i++ {
+				results = append(results, ret[i])
+			}
 		}(s)
 	}
 	for i := 0; i < cap(sem); i++ {
 		sem <- true
 	}
 
-	tx := Begin(config.db)
+	tx, _ := config.db.Beginx()
 
 	vPrintf("Received %d total results\n", len(results))
 
 		(?, ?, ?, ?, coalesce((select updated from series where url = ?), 0))`
 
 	for _, r := range results {
-		tx.Exec(q, r["name"], r["key"], r["url"], r["site"], r["url"])
+		tx.Exec(q, r.Name, r.Key, r.Url, r.Site, r.Url)
 	}
 	for _, s := range sites {
 		tx.Exec("update sites set updated = ? where name = ?", now, s.Name)
 	tx.Commit()
 }
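
One caveat in UpdateSites above: the workers append to the shared results slice from several goroutines at once, which is a data race. A hedged sketch of one conventional fix, funneling each worker's result through a buffered channel instead of sharing the slice:

    resCh := make(chan []*Series, len(sites))
    for _, s := range sites {
        sem <- true
        go func(site Site) {
            defer func() { <-sem }()
            if updater, ok := Updaters[site.Name]; ok {
                resCh <- UpdateSeries(updater)
            } else {
                fmt.Printf("Unknown site-name %s, skipping update.\n", site.Name)
                resCh <- nil // send exactly once per worker so the drain below terminates
            }
        }(s)
    }
    // draining the semaphore guarantees every worker has sent
    for i := 0; i < cap(sem); i++ {
        sem <- true
    }
    close(resCh)
    for ret := range resCh {
        results = append(results, ret...)
    }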
 
-func PrintDict(dict map[string]string) {
-	fmt.Printf("{\n")
-	for key, val := range dict {
-		fmt.Printf("  \"%s\": \"%s\",\n", key, val)
-	}
-	fmt.Printf("}\n")
-}
-
-var UpdateUrls = map[string]string{
-	"manga-access": "/manga/list",
-	"mangahere":    "/mangalist/",
-	"mangareader":  "/alphabetical",
-	"mangafox":     "/manga/",
-}
-
-func UpdateMangaaccess(site *Site, doc *Document) []map[string]string {
-	series := doc.CssSelect("#inner_page >div a")
-	data := make([]map[string]string, len(series))
-	for i, anchor := range series {
-		data[i] = map[string]string{
-			"site": site.Name,
-			"name": strings.Trim(anchor.Text(), "\t "),
-			"url":  UrlJoin(site.Url, anchor.Attr("href")),
-		}
-		spl := strings.Split(data[i]["url"], "/")
-		data[i]["key"] = spl[len(spl)-1]
-		data[i]["url"] = data[i]["url"] + "?mature_confirm=1"
-	}
-
-	fmt.Printf("Found %d series for manga-access\n", len(data))
-	return data
-}
-
-func UpdateMangahere(site *Site, doc *Document) []map[string]string {
-	series := doc.CssSelect("div.list_manga li a")
-	data := make([]map[string]string, len(series))
-
-	for i, anchor := range series {
-		data[i] = map[string]string{
-			"site": site.Name,
-			"name": strings.Trim(anchor.Text(), " \t"),
-			"url":  strings.Trim(anchor.Attr("href"), " \t"),
-		}
-		if !strings.HasPrefix(data[i]["url"], "http") {
-			data[i]["url"] = UrlJoin(site.Url, data[i]["url"])
-		}
-		url := strings.TrimRight(data[i]["url"], "/")
-		spl := strings.Split(url, "/")
-		data[i]["key"] = spl[len(spl)-1]
-	}
-	fmt.Printf("Found %d series for mangahere\n", len(data))
-	return data
-}
-
-func UpdateMangareader(site *Site, doc *Document) []map[string]string {
-	fmt.Printf("Update mangareader\n")
-	results := make([]map[string]string, 0)
-	return results
-}
-
-func UpdateMangafox(site *Site, doc *Document) []map[string]string {
-	fmt.Printf("Update mangafox\n")
-	results := make([]map[string]string, 0)
-	return results
-}
-
-var SiteUpdateFunctions = map[string]func(*Site, *Document) []map[string]string{
-	"manga-access": UpdateMangaaccess,
-	"mangahere":    UpdateMangahere,
-	"mangareader":  UpdateMangareader,
-	"mangafox":     UpdateMangafox,
-}
-
-func UpdateSite(site Site) []map[string]string {
-	path, ok := UpdateUrls[site.Name]
-	updater := SiteUpdateFunctions[site.Name]
-	none := make([]map[string]string, 0)
-
-	if !ok {
-		fmt.Printf("Unknown site-name %s, skipping update.\n", site.Name)
-		return none
-	}
-	url := UrlJoin(site.Url, path)
-	vPrintf("Updating %s via %s\n", site.Name, url)
-
-	cachefile := "./cache/" + site.Name + ".html"
-	document, err := HttpGetDocument(url, cachefile)
-	if err != nil {
-		fmt.Printf("Error fetching site %s\n", site.Name)
-		return none
-	}
-	defer document.Free()
-	return updater(&site, document)
-}
-
 // update chapters for a series.  searches the db for all sites that have
 // the series, and updates any which are too old for the chapters threshold
 func UpdateChapters(name string, force ...bool) []*Chapter {
 		after -= CHAPTER_UPDATE_FREQUENCY
 	}
 
-	series := QuerySeries(config.db, "select * from series where name = ? AND updated < ?", name, after)
-
+	series := []*Series{}
+	err := config.db.Select(&series, "select * from series where name = ?  AND updated < ?", name, after)
+	if err != nil {
+		fmt.Printf("Error: %s\n", err)
+		return []*Chapter{}
+	}
 	if len(series) > 0 {
 		if !Force {
 			vPrintf("Updating %d sites for %s last updated over 1 week ago:\n", len(series), name)
 		}
 
 		sem := make(chan bool, MAX_CONCURRENT_WORKERS)
-		results := make([]map[string]string, 0)
+		results := []*Chapter{}
 
 		for _, s := range series {
 			sem <- true
 			go func(s *Series) {
 				defer func() { <-sem }()
-				updater := ChapterUpdateFunctions[s.Site]
-				ret := updater(s)
-				results = dictcat(results, ret)
+				updater, ok := Updaters[s.Site]
+				if !ok {
+					fmt.Printf("Unknown site-name %s, skipping update.\n", s.Site)
+					return
+				}
+				ret := updater.UpdateChapters(s)
+				for i := 0; i < len(ret); i++ {
+					results = append(results, ret[i])
+				}
 			}(s)
 		}
 		for i := 0; i < cap(sem); i++ {
 			sem <- true
 		}
 
-		tx := Begin(config.db)
+		tx, _ := config.db.Beginx()
 
 		vPrintf("Received %d total results\n", len(results))
 
 		for _, s := range series {
-			Exec(tx, "delete from chapters where name=? and site=?", name, s.Site)
+			tx.Execf("delete from chapters where name=? and site=?", name, s.Site)
 		}
 
 		q := `insert or replace into chapters 
 		(?, ?, ?, ?, ?)`
 
 		for _, r := range results {
-			Exec(tx, q, r["name"], r["number"], r["url"], r["series"], r["site"])
+			tx.Execf(q, r.Name, r.Number, r.Url, r.Series, r.Site)
 		}
 		for _, s := range series {
-			Exec(tx, "update series set updated = ? where name = ? and site= ?", now, name, s.Site)
+			tx.Execf("update series set updated = ? where name = ? and site= ?", now, name, s.Site)
 		}
 		tx.Commit()
 	}
 	return chapters
 }
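
The same unsynchronized append to results appears in UpdateChapters above; the channel-collection sketch after UpdateSites applies there as well, with []*Chapter in place of []*Series.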
 
-func UpdateChaptersMangaaccess(series *Series) []map[string]string {
-	fmt.Printf("Updating manga-access for series %s\n", series.Name)
-	none := make([]map[string]string, 0)
-
-	cachefile := fmt.Sprintf("./cache/%s-%s.html", series.Name, series.Site)
-	document, err := HttpGetDocument(series.Url, cachefile)
-	if err != nil {
-		fmt.Printf("Error getting %s\n", series.Name)
-		return none
-	}
-	defer document.Free()
-	base := BaseUrl(series.Url)
-
-	anchors := document.CssSelect("a.download-link")
-	data := make([]map[string]string, len(anchors))
-	for i, anchor := range anchors {
-		data[i] = map[string]string{
-			"site":   series.Site,
-			"series": series.Name,
-			"name":   strings.Trim(anchor.Text(), " \t\r\n"),
-			"url":    strings.Trim(anchor.Attr("href"), " \t\r\n"),
-			"number": "",
-		}
-		if !strings.HasPrefix(data[i]["url"], "http") {
-			data[i]["url"] = UrlJoin(base, data[i]["url"])
-		}
-		numelem := anchor.CssSelect("strong")
-		if len(numelem) > 0 {
-			data[i]["number"] = numelem[0].Text()
-		}
-	}
-	vPrintf("Found %d chapters on manga-access\n", len(data))
-	return data
-}
-
-func UpdateChaptersMangahere(series *Series) []map[string]string {
-	fmt.Printf("Updating mangahere for series %s\n", series.Name)
-	none := make([]map[string]string, 0)
-
-	cachefile := fmt.Sprintf("./cache/%s-%s.html", series.Name, series.Site)
-	document, err := HttpGetDocument(series.Url, cachefile)
-	if err != nil {
-		fmt.Printf("Error getting %s\n", series.Name)
-		return none
-	}
-	defer document.Free()
-	base := BaseUrl(series.Url)
-
-	anchors := document.CssSelect(".detail_list li a")
-	data := make([]map[string]string, len(anchors))
-	for i, anchor := range anchors {
-		data[i] = map[string]string{
-			"site":   series.Site,
-			"series": series.Name,
-			"name":   strings.Trim(anchor.Text(), " \t\n\r"),
-			"url":    strings.Trim(anchor.Attr("href"), " \t\n\r"),
-			"number": "",
-		}
-		spl := strings.Split(strings.TrimRight(data[i]["url"], "/"), "/")
-		data[i]["number"] = FindNumber(spl[len(spl)-1])
-		if len(data[i]["number"]) == 0 {
-			data[i]["number"] = FindNumber(data[i]["name"])
-		} else {
-			// remove leading junk
-			data[i]["number"] = strings.TrimLeft(data[i]["number"], " \t0")
-		}
-		if !strings.HasPrefix(data[i]["url"], "http") {
-			data[i]["url"] = UrlJoin(base, data[i]["url"])
-		}
-	}
-	vPrintf("Found %d chapters on mangahere\n", len(data))
-	return data
-
-}
-func UpdateChaptersMangareader(series *Series) []map[string]string {
-	fmt.Printf("Updating mangareader for series %s\n", series.Name)
-	return []map[string]string{}
-}
-func UpdateChaptersMangafox(series *Series) []map[string]string {
-	fmt.Printf("Updating mangafox for series %s\n", series.Name)
-	return []map[string]string{}
-}
-
-var ChapterUpdateFunctions = map[string]func(*Series) []map[string]string{
-	"manga-access": UpdateChaptersMangaaccess,
-	"mangahere":    UpdateChaptersMangahere,
-	"mangareader":  UpdateChaptersMangareader,
-	"mangafox":     UpdateChaptersMangafox,
-}
-
 func SelectUrl(chapter *Chapter) (string, string) {
 	spl := strings.Split(chapter.Site, ",")
 	m := make(map[string]int, len(spl))
 	return "", ""
 }
 
-// TODO: do everything through this, as it's superior to what i've been doing
-
-type Downloader interface {
-	// get page urls via a document and the url string of that document
-	GetPageUrls(string, *Document) []string
-	GetImageUrl(*Document) string
-}
-
-type MangaAccess struct{}
-
-func (m *MangaAccess) GetPageUrls(url string, doc *Document) []string {
-	options := doc.CssSelect("select#page_switch option")
-	if len(options) < 1 {
-		return []string{}
-	}
-	urls := make([]string, len(options)-1)
-	base := BaseUrl(url)
-
-	for i, o := range options[1:] {
-		urls[i] = UrlJoin(base, o.Attr("value"))
-	}
-	return urls
-}
-
-func (m *MangaAccess) GetImageUrl(doc *Document) string {
-	img := doc.CssSelect("#pic img")
-	if len(img) == 0 {
-		return ""
-	}
-	return img[0].Attr("src")
-}
-
-type MangaHere struct{}
-
-func (m *MangaHere) GetPageUrls(url string, doc *Document) []string {
-	options := doc.CssSelect(".readpage_top .go_page option")
-	if len(options) < 1 {
-		return []string{}
-	}
-	urls := make([]string, len(options)-1)
-	base := BaseUrl(url)
-
-	for i, o := range options[1:] {
-		fragment := o.Attr("value")
-		if strings.HasPrefix(fragment, "http") {
-			urls[i] = fragment
-		} else {
-			urls[i] = UrlJoin(base, fragment)
-		}
-	}
-	return urls
-}
-
-func (m *MangaHere) GetImageUrl(doc *Document) string {
-	img := doc.CssSelect("#image")
-	if len(img) == 0 {
-		return ""
-	}
-	return img[0].Attr("src")
-}
-
-var Downloaders = map[string]Downloader{
-	"manga-access": new(MangaAccess),
-	"mangahere":    new(MangaHere),
-}
-
 func DownloadChapter(chapter *Chapter) error {
 	site, url := SelectUrl(chapter)
-	// fmt.Printf(" %s %s (%s, %s)\n", chapter.Series, chapter.Number, site, url)
-	downloader := Downloaders[site]
+	vPrintf(" %s %s (%s, %s)\n", chapter.Series, chapter.Number, site, url)
+	updater := Updaters[site]
 
 	doc, err := HttpGetDocument(url)
 	if err != nil {
+		fmt.Printf("Error fetching `%s`: %s\n", url, err)
 		return err
 	}
 	var destzip string
 			fmt.Sprintf("%s-c%s.zip", series, chapter.Number))
 	}
 
-	page_urls := downloader.GetPageUrls(url, doc)
+	page_urls := updater.GetPageUrls(url, doc)
 	numwidth := len(fmt.Sprintf("%d", len(page_urls)))
 	numfmt := fmt.Sprintf("%%0%dd", numwidth)
 	// fmt.Printf("Making destination dir: %s", destpath)
 	images := make(chan Img, len(page_urls))
 	completed := make(chan int)
 	// send the first image on the images channel
-	images <- Img{1, downloader.GetImageUrl(doc)}
+	images <- Img{1, updater.GetImageUrl(doc)}
 
 	// print a little updater in place about pages we're loading
 	go func(max int) {
 				fmt.Printf("Error fetching page %03d (%s)\n", num, url)
 				return
 			}
-			images <- Img{num, downloader.GetImageUrl(doc)}
+			images <- Img{num, updater.GetImageUrl(doc)}
 		}(i+2, s)
 	}