Commits

Jason Moiron committed 30ad240

some sqlx refactoring; rename UpdateSeriesList and UpdateChapters to GetSeriesList and GetChapters on Updater, as they do not update anything; add the concept of a search-only site and the ability to activate searches on internet sites for those that might not have a chapter list
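
The call site that actually "activates" searches is not visible in the hunks below, so here is a rough sketch of how the new flag could gate live searches. searchAll is a hypothetical helper; only FindSeries, UpdaterRegistry, opts, and Updater.Search come from the commit itself:

    // Hypothetical glue, not part of the commit: when --active-search is
    // set, merge local database matches with live results from every
    // registered updater's Search method.
    func searchAll(terms ...string) ([]Series, error) {
        series, err := FindSeries(opts.Site, terms...)
        if err != nil {
            return nil, err
        }
        if opts.UseActiveSearches {
            for _, u := range UpdaterRegistry {
                for _, s := range u.Search(terms...) {
                    series = append(series, *s)
                }
            }
        }
        return series, nil
    }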

  • Parent commits 7c3d0f1

Files changed (5)

File mangahere.go

 
 var _ Updater = &Mangahere{}
 
-func (m *Mangahere) SeriesListPath() string { return UrlJoin(m.Url, "/mangalist/") }
-func (m *Mangahere) Cache() HttpCache       { return nil }
-func (m *Mangahere) Name() string           { return m.Site.Name }
+func (m *Mangahere) SeriesListPath() string          { return UrlJoin(m.Url, "/mangalist/") }
+func (m *Mangahere) Cache() HttpCache                { return nil }
+func (m *Mangahere) Name() string                    { return m.Site.Name }
+func (m *Mangahere) Search(term ...string) []*Series { return nil }
 
-func (m *Mangahere) UpdateSeriesList(doc *goquery.Document) []*Series {
+func (m *Mangahere) GetSeriesList(doc *goquery.Document) []*Series {
 	series := make([]*Series, 0, 10)
 
 	for _, a := range doc.Find("div.list_manga li a").All() {
 	return series
 }
 
-func (m *Mangahere) UpdateChapters(series *Series) []*Chapter {
+func (m *Mangahere) GetChapters(series *Series) []*Chapter {
 	// always print when we do this
 	log.Printf("Updating %s for series %s\n", m.Name(), series.Name)
 
 
 // Return a list of all series matching terms, grouped by name with
 // repeating fields concatenated as CSV.  If site is non-empty, results
 // are restricted to that site.
-func FindSeries(group bool, terms ...string) ([]Series, error) {
-	q := `SELECT name,
-		group_concat(key),
-		group_concat(url),
-		group_concat(site),
-		updated
-	FROM series WHERE name LIKE ? OR key LIKE ?`
-	if group {
-		q = q + " GROUP BY name"
-	}
-	series := make([]Series, 0)
+func FindSeries(site string, terms ...string) ([]Series, error) {
+	series := make([]Series, 0, 10)
 	term := fmt.Sprintf("%%%s%%", strings.Join(terms, "%"))
+	var err error
 
-	rows, err := config.db.Query(q, term, term)
-	if err != nil {
-		return series, err
-	}
+	q := `SELECT name,
+		group_concat(key) as key,
+		group_concat(url) as url,
+		group_concat(site) as site,
+		updated
+	FROM series WHERE `
 
-	for rows.Next() {
-		s := Series{}
-		err = rows.Scan(&s.Name, &s.Key, &s.Url, &s.Site, &s.Updated)
-		if err != nil {
-			fmt.Printf("Error scanning row: %s\n", err)
-		}
-		series = append(series, s)
+	if len(site) == 0 {
+		q += `name LIKE ? OR key LIKE ? GROUP BY NAME`
+		err = config.db.Select(&series, q, term, term)
+	} else {
+		q += `site = ? AND (name LIKE ? OR key LIKE ?) GROUP BY NAME`
+		err = config.db.Select(&series, q, site, term, term)
 	}
-
-	return series, nil
+	return series, err
 }
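
The rewrite above leans on sqlx's Select, which runs the query and unmarshals every row into the destination slice in one call, so the manual rows.Next/rows.Scan loop disappears. A self-contained sketch of the mapping it relies on — the Series field types here are assumptions, since the schema isn't in the diff; by default sqlx matches columns against lowercased field names, which is why the query aliases each group_concat column:

    package main

    import (
        "log"

        "github.com/jmoiron/sqlx"
        _ "github.com/mattn/go-sqlite3"
    )

    // Assumed shape of Series: columns name, key, url, site, updated
    // bind to these fields via sqlx's default lowercase name mapping.
    type Series struct {
        Name    string
        Key     string
        Url     string
        Site    string
        Updated int64 // assumption: stored as a unix timestamp
    }

    func main() {
        db := sqlx.MustConnect("sqlite3", "mango.db") // illustrative path
        var series []Series
        if err := db.Select(&series, "SELECT name, key, url, site, updated FROM series"); err != nil {
            log.Fatal(err)
        }
        log.Printf("loaded %d series", len(series))
    }

The same package provides MustBegin, which the transaction code further down adopts in place of the Beginx call whose error was previously ignored.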
 
 // Like FindSeries above, but attempts to find an exact match for the
 // terms being searched for;  if the name or the key is a case-insensitive
 // full string match to the terms, the returned array will contain only
 // that series.
-func FindOneSeries(group bool, terms ...string) ([]Series, error) {
-	series, err := FindSeries(group, terms...)
+func FindOneSeries(site string, terms ...string) ([]Series, error) {
+	series, err := FindSeries(site, terms...)
 	if len(series) < 2 {
 		return series, err
 	}
 	*Site
 }
 
-func (s *Starkana) SeriesListPath() string { return UrlJoin(s.Url, "/manga/list/") }
-func (s *Starkana) Cache() HttpCache       { return nil }
-func (s *Starkana) Name() string           { return s.Site.Name }
+var _ Updater = new(Starkana)
+
+func (s *Starkana) SeriesListPath() string          { return UrlJoin(s.Url, "/manga/list/") }
+func (s *Starkana) Cache() HttpCache                { return nil }
+func (s *Starkana) Name() string                    { return s.Site.Name }
+func (s *Starkana) Search(term ...string) []*Series { return nil }
 
 func (s *Starkana) GetPageUrls(doc *goquery.Document) []string {
 	options := doc.Find("select#page_switch option").All()
 	return urls
 }
 
-func (s *Starkana) GetImageUrl(doc *goquery.Document) string {
-	return Attr(doc.Find("#pic img"), "src")
-}
-
-func (s *Starkana) UpdateChapters(series *Series) []*Chapter {
+func (s *Starkana) GetChapters(series *Series) []*Chapter {
 	// always print what we're doing when we do this
 	log.Printf("Updating %s for series %s\n", s.Name(), series.Name)
 
 	return results
 }
 
-func (s *Starkana) UpdateSeriesList(doc *goquery.Document) []*Series {
+func (s *Starkana) GetSeriesList(doc *goquery.Document) []*Series {
 	series := make([]*Series, 0, 10)
 	for _, a := range doc.Find("#inner_page >div a").All() {
 		r := &Series{}
 	return series
 }
 
-var _ Updater = new(Starkana)
+func (s *Starkana) GetImageUrl(doc *goquery.Document) string {
+	return Attr(doc.Find("#pic img"), "src")
+}
 	Force                bool
 	SetDownloadPath      string
 	Download             bool
+	Site                 string
 	ListSites            bool
 	AddSite              string
 	RemoveSite           string
 	SetSitePriority      string
 	ToggleUseUnderscores bool
+	UseActiveSearches    bool
 	List                 bool
 	Add                  bool
 	Remove               bool
 
 func Search(terms ...string) {
 	UpdateSites(false)
-	series, err := FindSeries(true, terms...)
+	series, err := FindSeries(opts.Site, terms...)
 	if err != nil {
-		fmt.Printf("Error searching for terms: %s\n", err)
+		log.Printf("Search: %s\n", err)
 		return
 	}
 
 	term := strings.Join(terms, " ")
 	w := new(Watchlist)
 
-	series, err := FindOneSeries(true, term)
+	series, err := FindOneSeries(opts.Site, term)
 	if err != nil {
 		return
 	}
 	term := strings.Join(terms, " ")
 	w := new(Watchlist)
 
-	series, err := FindOneSeries(true, term)
+	series, err := FindOneSeries(opts.Site, term)
 	if err != nil {
 		return
 	}
 
 func Show(terms ...string) {
 	UpdateSites()
-	series, err := FindOneSeries(true, terms...)
+	series, err := FindOneSeries(opts.Site, terms...)
 	if err != nil {
 		fmt.Printf("Error searching for terms: %s\n", err)
 	}
 }
 
 func DownloadChapters(terms ...string) {
-
 	UpdateSites()
-	series, err := FindOneSeries(true, terms...)
+	series, err := FindOneSeries(opts.Site, terms...)
 	if err != nil {
 		fmt.Printf("Error searching for terms: %s\n", err)
 	}
 	optarg.Add("", "add-site", "<name> <url> [priority], Add a site.", "")
 	optarg.Add("", "rm-site", "<name>, Remove a site.", "")
 	optarg.Add("", "set-site-priority", "<name> <priority>, Set download priority.", "")
+	optarg.Add("", "site", "<name>, Restrict to one site.", "")
+	optarg.Add("c", "active-search", "Use sites which cannot be searched locally.", false)
 
 	optarg.Header("Series")
 	optarg.Add("l", "list", "List series being followed.", false)
 			opts.RemoveSite = opt.String()
 		case "set-site-priority":
 			opts.SetSitePriority = opt.String()
+		case "site":
+			opts.Site = opt.String()
+		case "active-search":
+			opts.UseActiveSearches = opt.Bool()
 		// series
 		case "list":
 			opts.List = opt.Bool()
 	Cache() HttpCache
 	// Name returns the name of this updater.
 	Name() string
-	// UpdateChapters returns a list of Chapters for the series being updated.
-	UpdateChapters(series *Series) []*Chapter
-	// UpdateSeriesList takes a Document and returns a list of Series from it.
-	UpdateSeriesList(*goquery.Document) []*Series
+	// GetChapters returns a list of Chapters for the series being updated.
+	GetChapters(series *Series) []*Chapter
+	// GetSeriesList takes a Document and returns a list of Series from it.
+	GetSeriesList(*goquery.Document) []*Series
 	// GetPageUrls returns a list of the urls (as strings) on the page.
 	GetPageUrls(*goquery.Document) []string
 	// GetImageUrl returns the url of the image on the Document, which is a page.
 	GetImageUrl(*goquery.Document) string
+	// Search does a live search against the website.  For sites that use full
+	// lists and local searches, this should be a noop.
+	Search(terms ...string) []*Series
 }
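
For a site with no crawlable series list, a search-only implementation would stub the list and chapter methods and do its real work in Search. This is a hypothetical example, not a type from the commit, and the /search path is likewise invented:

    // Hypothetical search-only site: it satisfies Updater, but only
    // Search does real work.
    type SearchOnlySite struct {
        *Site
    }

    var _ Updater = &SearchOnlySite{}

    func (s *SearchOnlySite) SeriesListPath() string                    { return "" }
    func (s *SearchOnlySite) Cache() HttpCache                          { return nil }
    func (s *SearchOnlySite) Name() string                              { return s.Site.Name }
    func (s *SearchOnlySite) GetSeriesList(*goquery.Document) []*Series { return nil }
    func (s *SearchOnlySite) GetChapters(*Series) []*Chapter            { return nil }
    func (s *SearchOnlySite) GetPageUrls(*goquery.Document) []string    { return nil }
    func (s *SearchOnlySite) GetImageUrl(*goquery.Document) string      { return "" }

    func (s *SearchOnlySite) Search(terms ...string) []*Series {
        // Fetch and scrape a results page, e.g. UrlJoin(s.Url, "/search");
        // the query format is an assumption, as real sites differ.
        return nil
    }

A concrete type like this would also need a case in the siteMap switch further down so that it lands in UpdaterRegistry.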
 
 // UpdateSeries uses an updater to load the cache of series at that site, if the
 	if err != nil {
 		log.Fatalf("Invalid URL `%s` loaded without error, but parsing failed: %s\n", seriesUrl, err)
 	}
-	return u.UpdateSeriesList(document)
+	return u.GetSeriesList(document)
 }
 
 func GetDocument(u Updater, Url string) (*goquery.Document, error) {
 		siteMap[site.Name] = site
 	}
 
-	for k, v := range siteMap {
-		var u Updater
-		switch k {
+	for key, val := range siteMap {
+		switch key {
 		case "starkana":
-			u = &Starkana{v}
+			UpdaterRegistry[key] = &Starkana{val}
 		case "mangahere":
-			u = &Mangahere{v}
-		}
-		if u != nil {
-			UpdaterRegistry[v.Name] = u
+			UpdaterRegistry[key] = &Mangahere{val}
 		}
 	}
 }
 
-var UpdateUrls = map[string]string{
-	"starkana":    "/manga/list",
-	"mangahere":   "/mangalist/",
-	"mangareader": "/alphabetical",
-	"mangafox":    "/manga/",
-}
-
 func UpdateSites(force ...bool) {
 	Force := DefaultFalse(force...)
 
 	}
 
 	q := "select name, url, priority, updated from sites WHERE updated < ? ORDER BY priority"
-	rows, err := config.db.Query(q, after)
+	err := config.db.Select(&sites, q, after)
 	if err != nil {
 		panic(err)
 	}
 
-	for rows.Next() {
-		site := new(Site)
-		err = rows.Scan(&site.Name, &site.Url, &site.Priority, &site.Updated)
-		if err != nil {
-			fmt.Printf("Error: %s\n", err)
-		}
-		sites = append(sites, *site)
-	}
-
 	if len(sites) == 0 {
 		return
 	}
 		go func(site Site) {
 			defer func() {
 				if r := recover(); r != nil {
-					fmt.Printf("Recovered in %#v: %s\n", site, r)
+					log.Printf("Recovered in %#v: %s\n", site, r)
 				}
 				<-sem
 			}()
 			updater, ok := UpdaterRegistry[site.Name]
 			if !ok {
-				fmt.Printf("Unknown site-name %s, skipping update.\n", site.Name)
+				log.Printf("Unknown site-name or no updater for %s, skipping update.\n", site.Name)
 				return
 			}
 			ret := UpdateSeries(updater)
-			for i := 0; i < len(ret); i++ {
-				results = append(results, ret[i])
-			}
+			results = append(results, ret...)
 		}(s)
 	}
 	for i := 0; i < cap(sem); i++ {
 		sem <- true
 	}
 
-	tx, _ := config.db.Beginx()
-
+	tx := config.db.MustBegin()
 	vprintf("Received %d total results\n", len(results))
 
 	q = `insert or replace into series 
 					fmt.Printf("Unknown site-name %s, skipping update.\n", s.Site)
 					return
 				}
-				ret := updater.UpdateChapters(s)
-				for i := 0; i < len(ret); i++ {
-					results = append(results, ret[i])
-				}
+				chapters := updater.GetChapters(s)
+				results = append(results, chapters...)
 			}(s)
 		}
 		for i := 0; i < cap(sem); i++ {
 	os.RemoveAll(destpath)
 	close(completed)
 
-	config.Log(fmt.Sprintf("Downloaded %s %s from %s", chapter.Series, chapter.Number, site))
+	msg := fmt.Sprintf("Downloaded %s %s from %s", chapter.Series, chapter.Number, site)
+	config.Log(msg)
 
 	return nil
 }
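
A side note on the concurrency pattern both update loops keep using: the buffered channel sem acts as a counting semaphore. Each worker frees a slot with <-sem in its deferred cleanup, and the parent waits for everyone by refilling the channel to capacity; those final sends can only all complete once every outstanding slot has been released. A standalone sketch of the idiom — the acquiring sem <- true before each go statement is elided from the hunks above, so that part is an assumption:

    package main

    import "fmt"

    func main() {
        sites := []string{"starkana", "mangahere", "mangareader"}
        sem := make(chan bool, 2) // at most two concurrent updates

        for _, name := range sites {
            sem <- true // acquire a slot; blocks while two workers are running
            go func(name string) {
                defer func() { <-sem }() // release the slot on completion
                fmt.Println("updating", name)
            }(name)
        }

        // Join: filling the buffer back to capacity succeeds only after
        // every worker has released its slot.
        for i := 0; i < cap(sem); i++ {
            sem <- true
        }
    }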