Commits

Anton Afanasyev committed 6b7a660

initial work on rewrite

  • Parent commits a188a10
  • Branches extended

Files changed (1)

File autopager.iced

+# if required, launch updates and delay running until updates complete
+# when running:
+#	* get current page url
+#	* get list of rules that match current domain
+#		* if none, find all rules with persistent IDs for the current domain and store them in settings
+#	* try the rules for the current domain
+#		* if none match current page, try rules without persistent IDs
+#		* if none match, try regexp
+
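+# getAllSources: the built-in siteinfo sources; each entry carries a display
+# name, the JSON feed url, a stable guid, and a refresh interval in ms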
+getAllSources = ->
+	[
+		{
+			name: "AA rules"
+			url: "http://home.aa-fam.com/useful/userscripts/autopagerize/items.json"
+			guid: "35dcc84a-a774-4ff5-8f96-0f0991dcbb47"
+			expirationInterval: 24 * 60 * 60 * 1000
+		}
+		{
+			name: "AutoPagerize"
+			url: "http://wedata.net/databases/AutoPagerize/items.json"
+			guid: "4d93e476-2e07-40ef-a30e-d8983ea3ae9c"
+			expirationInterval: 24 * 60 * 60 * 1000 # daily
+		}
+		{
+			name: "AutoPager"
+			url: "http://rep.teesoft.info/autopager/json"
+			guid: "ddf24a5a-e7e4-405f-8095-4f66f582e389"
+			expirationInterval: 24 * 60 * 60 * 1000 # daily
+		}
+	]
+
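+# getCacheCallback: parse a fetched siteinfo feed, normalize and sort its
+# rules, and cache them under the source url; delegates to errCb on a bad
+# HTTP status or an empty result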
+getCacheCallback = ( res, urlInfo, errCb ) ->
+	if (res.status != 200)
+		return errCb urlInfo
+
+	info = null
+	try
+		parsedDatas = null
+		try
+			parsedDatas = JSON.parse res.responseText
+		catch e
+			log "couldn't parse json in getCacheCallback(" + res + ", " + urlInfo.url + "); trying eval"
+			parsedDatas = eval res.responseText
+
+		mapFn = pickMapFunction parsedDatas[0]
+		info = parsedDatas.map( mapFn ).filter( (v) -> v!=null )
+	catch e
+		log "getCacheCallback(" + res + ", " + urlInfo.url + ") error:\r\n" + e
+		info = []
+
+	if info.length == 0
+		return errCb urlInfo
+
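+	# split the rules: entries keyed by explicit url lists/functions go first,
+	# then single-url rules sorted longest-pattern-first (most specific wins)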
+	withoutUrl = info.filter( (i) -> ('urls' of i) || ('urlFunc' of i) )
+
+	withUrl = info.filter( (i) -> ('url' of i) )
+	withUrl.sort( (a, b) -> (b.url.length - a.url.length) )
+
+	info = withoutUrl.concat( withUrl )
+	#info = info.filter( (i) -> ('url' in i) || ('urls' in i) || ('urlFunc' in i) )
+	#info.sort( (a, b) -> (b.url.length - a.url.length) )
+
+	###
+	var r_keys = ['url', 'nextLink', 'insertBefore', 'pageElement']
+	info = info.map(function(i) {
+		var item = {};
+		r_keys.forEach(function(key) {
+			if (i[key])
+			{
+				item[key] = i[key];
+			}
+		});
+		return item;
+	});
+	###
+
+	cacheInfo[urlInfo.url] = {
+		url: urlInfo.url
+		expire: new Date(new Date().getTime() + (urlInfo.expire || CACHE_EXPIRE))
+		info: info
+	}
+
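+	# persist the whole cache; toSource() is SpiderMonkey-only serialization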
+	GM_setValue( 'cacheInfo', cacheInfo.toSource() )
+	return
+
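+# getCacheErrorCallback: on a failed fetch, push the cache entry's expiry
+# forward so the source isn't retried on every page load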
+getCacheErrorCallback = (urlInfo) ->
+	expire = new Date(new Date().getTime() + (urlInfo.expire || CACHE_EXPIRE))
+	if cacheInfo[urlInfo.url]
+		cacheInfo[urlInfo.url].expire = expire
+	else
+		cacheInfo[urlInfo.url] = {
+			url: urlInfo.url
+			expire: expire
+			info: []
+		}
+
+	GM_setValue( 'cacheInfo', cacheInfo.toSource() )
+	return
+
+
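+# makeAjaxRequest: wrap GM_xmlhttpRequest with a timeout; `state` ensures the
+# callback fires exactly once (load, error, or timeout, whichever comes first)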
+makeAjaxRequest = ( url, callback ) ->
+	state = 0
+
+	opt =
+		method: 'GET'
+		url: url
+		onload: (res) ->
+			if state == -1
+				return
+
+			state = 1
+			callback {
+				result: res
+				ok: true
+			}
+		onerror: (res) ->
+			if state == -1
+				return
+
+			state = 2
+			callback {
+				result: res
+				ok: false
+			}
+
+	setTimeout () ->
+		if state != 0
+			return
+
+		state = -1
+		callback {
+			result: null
+			ok: false
+		}
+	, XHR_TIMEOUT
+
+	GM_xmlhttpRequest(opt)
+
+
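+# refreshSingleSiteInfoImportUrl: re-fetch one source unless its cache entry
+# is still fresh; cb receives true on success (or a fresh cache), else false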
+refreshSingleSiteInfoImportUrl = ( importItem, cb ) ->
+	cacheItem = cacheInfo[importItem.url]
+	if cacheItem && new Date(cacheItem.expire) > new Date()
+		# cache entry is still fresh; report success without re-fetching
+		return cb true
+
+	await makeAjaxRequest importItem.url, defer result
+
+	if result.ok
+		getCacheCallback result.result, importItem, getCacheErrorCallback
+	else
+		getCacheErrorCallback importItem
+
+	cb result.ok
+
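+# checkUpdateSiteInfoImportUrls: refresh every source in parallel; cb fires
+# once all fetches have settled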
+checkUpdateSiteInfoImportUrls = (cb) ->
+	out = []
+	await
+		for importItem, i in SITEINFO_IMPORT_URLS
+			refreshSingleSiteInfoImportUrl importItem, defer out[i]
+	cb out
+
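+# tryLaunchSiteInfoImportUrls: walk the cached rule sets and start the pager
+# with the first one that matches; returns true if anything launched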
+tryLaunchSiteInfoImportUrls = () ->
+	# try with regular ones, and then the Micro-format
+	allSiteInfos = [SITEINFO_IMPORT_URLS...]
+	allSiteInfos.push MICROFORMAT
+	
+	for importItem in allSiteInfos
+		cacheItem = cacheInfo[importItem.url]
+		continue unless cacheItem
+		if launchAutoPager cacheItem.info
+			return true
+
+	return false
+
+
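+# entry point: refresh the rule caches first, then launch once the fetches
+# settle, per the header comment ("delay running until updates complete")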
+await checkUpdateSiteInfoImportUrls defer()
+tryLaunchSiteInfoImportUrls()
+