diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..8e2cc49 --- /dev/null +++ b/.gitignore @@ -0,0 +1,14 @@ +clean_output.html +article_extractor/build/ +log/ +*.BROKEN.* +*.o +_crash_report.txt +dispatch/cache* +dispatch/output +copyright_removal/*.txt +copyright_removal/foo +/geo +/Scripts.txt +article_extractor/evaluation/ +feed_mgmt/* diff --git a/article_extractor/HtmlCleanTagProb.dat b/article_extractor/HtmlCleanTagProb.dat new file mode 100644 index 0000000..78b7407 Binary files /dev/null and b/article_extractor/HtmlCleanTagProb.dat differ diff --git a/article_extractor/__init__.py b/article_extractor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/article_extractor/article_extractor.cpp b/article_extractor/article_extractor.cpp new file mode 100644 index 0000000..c47c7d3 --- /dev/null +++ b/article_extractor/article_extractor.cpp @@ -0,0 +1,120 @@ +#include +#include "base.h" +#include "htmlclean.h" +#include + +static TStr readFile(TStr path) { + FILE * pFile; + unsigned long lSize; + char * buffer; + size_t result; + + pFile = fopen (path.CStr(), "rb"); + if (pFile==NULL) {fputs ("File error",stderr); exit (1);} + + // obtain file size: + fseek (pFile , 0 , SEEK_END); + lSize = ftell (pFile); + rewind (pFile); + + // allocate memory to contain the whole file: + buffer = (char*) malloc (sizeof(char)*lSize); + if (buffer == NULL) {fputs ("Memory error",stderr); exit (2);} + + // copy the file into the buffer: + result = fread (buffer,1,lSize,pFile); + if (result != lSize) {fputs ("Reading error",stderr); exit (3);} + + /* the whole file is now loaded in the memory buffer. */ + TStr ret(buffer); + + // terminate + fclose (pFile); + free (buffer); + + return ret; +} + +int main() { + TStr html = readFile("sample.html"); + int a=-1, b=-1; + THtmlClean *cleaner = new THtmlClean(); + TStr txt; + txt = cleaner->Extract(html); + delete cleaner; + printf(">>%s<< %d %d\n", txt.CStr(), a, b); +} + +TStr stripTags(TStr html) { + char *ret = new char[html.Len()+1]; int retLen=0; + char inQuotes=0; + bool inTag=false; + for (int i=0; i') { inTag = false; continue; } + } else { + if (c=='<') inTag = true; + } + + if (!inTag) { + ret[retLen++]=c; + } + } + ret[retLen] = '\0'; + TStr retStr(ret); + delete[] ret; + return retStr; +} + +extern "C" { + +THtmlClean *cleaner; + +static PyObject * +article_extractor_get_cleartext(PyObject *self, PyObject *args) +{ + const char *html_c; + if (!PyArg_ParseTuple(args, "s", &html_c)) + return NULL; + TStr html(html_c); + /* + html.ChangeStrAll("

","\n

"); + html.ChangeStrAll("

","\n
"); + html.ChangeStrAll("
","\n
"); + html.ChangeStrAll("
","\n
"); + */ + TStr txt = cleaner->Extract(html); + //txt = stripTags(txt); + if (txt.Len() > 100000) { + // the cleaner probably got it wrong + txt = ""; + } + /* + txt.ChangeStrAll("\r\n","\n"); + txt.ChangeStrAll("\r","\n"); + while (txt.ChangeStrAll("\n ","\n")>0); + while (txt.ChangeStrAll("\n\n","\n")>0); + */ + return Py_BuildValue("s", txt.CStr()); +} + +static PyMethodDef PyMethods[] = { + {"get_cleartext", article_extractor_get_cleartext, METH_VARARGS, + "Given HTML of a news article, return cleartext of the article body."}, + {NULL, NULL, 0, NULL} /* Sentinel */ +}; + +PyMODINIT_FUNC +initarticle_extractor(void) +{ + cleaner = new THtmlClean(); + (void) Py_InitModule("article_extractor", PyMethods); +} + +} diff --git a/article_extractor/article_extractor2.py b/article_extractor/article_extractor2.py new file mode 100644 index 0000000..d14eb4c --- /dev/null +++ b/article_extractor/article_extractor2.py @@ -0,0 +1,176 @@ +import re +import sys; sys.path.append('.') +import article_extractor +import sys; sys.path.append('..') +import util +import struct +import hashlib +import lxml.html, lxml.etree, lxml.html + +htmlWhitespace = re.compile(r'(
|])') +htmlTags = re.compile(r'<\s*/?\s*(\w+)[^>]*>') +htmlComments = re.compile(r'', re.DOTALL) +txtWhitespace = re.compile(r'[ \t]+') +multipleNewline = re.compile(r'(\n\s*)+') + +def _load_copyright_ngrams(path): + """ + Return a set of ngram hashes read from `path` which should contain lines + with two space-separated numbers: n and n-gram hash. + Such files are produced by `dump_common_maximal()` in find_freq_ngrams.cpp. + """ + try: + with open(path) as f: + return set(int(line.split()[1]) for line in f) + except Exception, e: + print 'Warning: failed to load copyright-ngrams data from %r.' % path + print 'Reason:', e + return set() +stop_ngrams = _load_copyright_ngrams("../copyright_removal/freq_ngrams.txt") + + + +def md5_64(txt): + "Lower 64 bits of md5. Cast as an uint64." + return struct.unpack("= a: kill_ranges[-1][1] = b+1 + else: kill_ranges.append([a,b+1]) + # advance to the next n-gram + a = s.find(' ',a)+1 + b = s.find(' ',b+1) + + if not kill_ranges: return txt # no changes + + slices = [slice(0, kill_ranges[0][0])] + for i in range(len(kill_ranges)-1): + slices.append(slice(kill_ranges[i][1], kill_ranges[i+1][0])) + slices.append(slice(kill_ranges[-1][1], -1)) # -1 to remove the trailing space character + s = ''.join(s[slc] for slc in slices) + + return s.decode('utf8', 'replace') + +def get_cleartext(html, logger=None): + """ + Converts a full-page html (utf8) to the cleartext (unicode) containing just the article body. + The first line of the return value is the title (can be empty). If there was an + error or if the html is suspected not to contain an article, an empty string is returned. + + `logger` should be None or a logging.Logger instance. + + `html` is usually text (unicode or utf8) can also be a lxml tree; in that case, some heuristic + cleanup is performed first. + + This calls the glib html->cleartext function, then does a bit of cleanup + and error checking. + """ + + if type(html) == lxml.html.HtmlElement: + # time for heuristic cleanup + xDoc = html + if xDoc is None: return '' + for el in xDoc.findall('.//*'): + info = (el.get('id','')+':'+el.get('class','')).lower() + # if the element is suspicious, replace it with "barrier" (a bunch of and tags) + # that the C module is very unlikely to include in the result + if re.search('foot|header|^nav|naviga[ct]|[ck]omm?ent|dis[kc]us|user|notice|spe[cz]ial|about', info) \ + and not re.search('main|article|content', info) and el.getparent() is not None: + idx = el.getparent().index(el) + el.getparent()[idx+1:idx+1] = [lxml.etree.fromstring('') for i in range(20)] + el.drop_tree() + html = lxml.etree.tostring(xDoc, encoding='utf8') + + + # If the output is very non-html-looking, don't bother with C++, it will only crash + if '\000' in html: + return '' + + # Do the decoding, but watch out for weirdness in return values + txt = article_extractor.get_cleartext(html) + try: + txt = txt.decode('utf8') + except UnicodeDecodeError: + if logger: + logger.exception('Article %s was cleartexted, but cleartext was not in utf8. Saved cleartext to /tmp/non_utf8. Exception:') + try: + with open('/tmp/non_utf8','w') as f: f.write(txt) + except: + pass + txt='' + + if len(txt) < 200: + # This can't be good/real + return '' + + # Fix up the output. Step 1: remove HTML tags. + # TODO: Need to strip tags from titles as well (rss crawler, gnews crawler). + # Move some of the code below to util.py, reuse. 
+ global txtr; txtr = txt # for debug + # Step 1a: small normalizations + txt = txt.rstrip('<') # glib output glitch; this is present only sometimes + txt = txt.replace('\r\n','\n').replace('\r','\n') + txt = htmlComments.sub('', txt) + txt = htmlWhitespace.sub(' \n\\1', txt) + # Step 1b: strip html tags (not elements!) except ') + r = re.compile(r'<[^>]*?>') + print r.sub('',p.sub('', sys.stdin.read().replace('\n', ' '))) + +if __name__ == '__main__': + main() diff --git a/langdet/lang_stats.py b/langdet/lang_stats.py new file mode 100644 index 0000000..69ebfad --- /dev/null +++ b/langdet/lang_stats.py @@ -0,0 +1,37 @@ +""" +Get a sample of articles from the news DB, show language distribution according to + - existing 'lang' column in the DB + - google's CLD (executed on the fly for each article) +""" + +import os, sys +sys.path.extend(('.','..')) +from cleanDb import openConnection +import cld + +conn, cur = openConnection() +cur = conn.cursor('x') +cur.execute("SELECT m.id, p.content, m.lang_altcode FROM processed_article p JOIN feed_article_meta m ON (p.feed_articleid = m.id) WHERE p.mode='cleartext' ORDER BY m.id DESC LIMIT 100000") + +cnt = {} +cnt2 = {} +while True: + row = cur.fetchone() + if not row: break + aid, txt, lang = row; lang = str(lang[:2]) + lang2 = cld.detect(txt.encode('utf8','ignore'))[1] + cnt[lang] = cnt.get(lang,0)+1 + cnt2[lang2] = cnt2.get(lang2,0)+1 + print 'done',sum(cnt.itervalues()) + + +print 'done' + +def top(d,n=60): + for pair in sorted(d.iteritems(), key=lambda pair: -pair[1])[:n]: + print '%s %5d' % pair + +print 'DATABASE SAYS:' +top(cnt) +print '\nCLD SAYS:' +top(cnt2) diff --git a/langdet/langdet.py b/langdet/langdet.py new file mode 100755 index 0000000..2c321f2 --- /dev/null +++ b/langdet/langdet.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python + +import pickle +import numpy as np +import scipy as sp +import scipy.sparse + +import scriptdet + +def trigrams(l, tgmid): + "input: string, trigram->id map; output: tgmid generator" + # !bn: newline-je bi bilo fajn pretvort v presledke ? + for tgm in [l[i:i+3] for i in range(len(l)-2)]: + if tgm[1] == ' ': continue + tgm = ('<' if tgm[0] == ' ' else tgm[0]) + tgm[1] + ('>' if tgm[2] == ' ' else tgm[2]) + if tgm in tgmid: yield tgmid[tgm] + +def langdet(s, D): + "input: string, langID db; output: (lc_iso_639_3, lc_alt, trigram count)" + tgmid, langmap, M, Mi, P, U = D + tgmids = list(trigrams(s, tgmid)) + + if len(tgmids) == 0: return ('unk', 'unk', 0) + + T = M[tgmids,:].sum(0) + C = Mi[tgmids,:].sum(0) + c = len(tgmids) + W = c - C + T = T + P + (np.array(W, dtype=np.float64) * U) + + #return T, len(tgmids) + best_lang = T.argmax() + return langmap[best_lang][0], langmap[best_lang][1], len(tgmids) + #print W + #return langmap[best_lang][0], langmap[best_lang][1], len(tgmids), T + + +def load_langdet_db(path='langid_nb.pck'): + tgmid, langid, langmap, M, Mi, P, U = pickle.load(open(path)) + M.sort_indices() + Mi.sort_indices() + + scripts,script_beg = scriptdet.load_scripts() + + return (tgmid, langmap, M, Mi, P, U, scripts, script_beg) + +def langdet_s(s, D, scripts, script_beg): + "input: langdet + scriptdet params" + h = scriptdet.scriptdet(scripts, script_beg, s) + + # check for jp/cn/kr + try: + del h['Common'] + except: + pass + total_chars = sum(h.values()) + + scriptmap = [ + ('Hangul', 5, 'kr', 'kor'), + ({'Hiragana', 'Katakana'}, 5, 'ja', 'jpn'), # order of scripts is relevant -- jpn before cmn + ('Han', 5, 'zhh', 'cmn'), # langset ? 
(zhh: zh, han) + ('Khmer', 5, 'km', 'khm'), + ('Hebrew', 3, 'he', 'heb'), + ('Arabic', 3, 'ar', 'arb'), + ('Ethiopic', 3, 'am', 'amh'), + ('Armenian', 3, 'hy', 'hye'), + ('Bengali', 3, 'bn', 'ben'), + ('Myanmar', 3, 'my', 'mya'), + ('Georgian', 3, 'ka', 'kat'), + ('Lao', 3, 'lo', 'lao'), + ('Sinhala', 3, 'si', 'sin'), + ('Thai', 3, 'th', 'tha'), + ('Tibetan', 3, 'bo', 'bod'), + ('Greek', 3, 'el', 'ell'), + # Devanagari -> limit langdet to indian langset + # ('Cyrillic', 3, {}, None), # problem: bosnian: cyrillic AND latin. + # ('Latin', 2, None, None) + ] + + for script_name, script_ratio, lc_alt, lc_iso in scriptmap: + if type(script_name) == str: script_name = {script_name} + + if script_ratio * sum(h[x] for x in script_name) > total_chars: + if type(lc_iso) == str: + return (lc_iso, lc_alt, None) + else: + # return langdet(s, D, lc_iso) # limit to lc_iso languages + pass + + return langdet(s, D) + +def main(): + import sys + import codecs + import time + + print "loading db .." + D = load_langdet_db() + langmap = D[1] + print "... done." + + for fn in sys.argv[1:]: + print fn + if fn == '-': + fl = sys.stdin.read().decode('utf-8') + else: + fl = codecs.open(fn, encoding='utf-8').readlines() + + timing = [] + for i in range(1): + print '.', + start = time.time() + + #T,nids = langdet(' '.join(fl), D) + li,lc,nids = langdet_s(' '.join(fl), D[:-2], *D[-2:]) + print li,lc,nids + stop = time.time() + timing.append(stop-start) + + #q = [] + #for i in langmap: + # q.append((T[0,i], langmap[i][1])) + # q.sort(reverse=True) + #print q[:10] + + print sum(timing) / len(timing), nids + + +if __name__ == '__main__': + main() diff --git a/langdet/make_dataset.py b/langdet/make_dataset.py new file mode 100755 index 0000000..5d827ef --- /dev/null +++ b/langdet/make_dataset.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +import sys +sys.path.append('..') +import cleanDb +import langdet +from collections import defaultdict +import re +import codecs + +""" + create a trigram dataset for selected languages + use site tlds, exclude articles with predicted {eng,enz,fr,gl*,pt,de,ca,lad,ptb{pt:brasil},su,ru,unk,ar,it} + except for 'es' dataset -- don't exclude gl. 
+ + params: target dir, langset: {lang:tld,tld,tld,...} +""" + +excluded_languages = {'eng','enz','fr','es','gl','pt','de','ca','lad','ptb','su','ru','ar','it'} # to morajo bit lang_altcode +known_fails = {'es':{'gl'}} +Q = "SELECT string_agg(content, '') FROM (select content from processed_article p inner join feed_article_meta m on m.id=p.feed_articleid inner join feed_article a on m.id = a.id inner join site s on s.id = a.siteid where s.tld in %(tlds)s and m.lang_altcode not in %(excluded)s limit %(limit)s) AS foo" + +def trigrams(l): + for tgm in [l[i:i+3] for i in range(len(l)-2)]: + if tgm[1] == ' ': continue + tgm = ('<' if tgm[0] == ' ' else tgm[0]) + tgm[1] + ('>' if tgm[2] == ' ' else tgm[2]) + yield tgm + +def main(): + db,cur = cleanDb.openConnection() + + dest_dir = sys.argv[1] + languages = {lang:(liso,lname,set(tlds.split(','))) for lang,liso,lname,tlds in [x.split(':') for x in sys.argv[2:]]} + + tbl = open(dest_dir + '/table.txt', 'w') + for lang in languages: + liso,lname,tlds = languages[lang] + print 'begin: ',lang,tlds # tlds = set of tlds, lang = lang_altcode + Qparams = {'tlds':tuple(tlds), 'excluded':tuple(excluded_languages - {lang} - known_fails.get(lang,set())), 'limit': 1000} + print "Q params", Qparams + cur.execute(Q, Qparams) + print cur.rowcount, "rows" + s = cur.fetchone()[0] + s = re.sub(r"(\s|[0-9])+", " ", s, flags=re.MULTILINE) + print 'strlen =', len(s) + #print s + #raw_input() + + tgms = defaultdict(int) + for tgm in trigrams(s): + tgms[tgm] += 1 + #print tgms + + tbl.write('%s\t%s\t%s\n' % (lang,liso,lname)) + codecs.open((dest_dir+'/%s-3grams.txt') % lang,'w', encoding='utf-8').write('\n'.join("%d %s"%(tgms[tgm],tgm) for tgm in tgms)) + + print 'end' + +if __name__ == '__main__': + main() diff --git a/langdet/make_langdet_db.py b/langdet/make_langdet_db.py new file mode 100755 index 0000000..b15fc2f --- /dev/null +++ b/langdet/make_langdet_db.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python + +""" +prebere NLTKjev 3gram dataset v sys.argv[1]/{*-3grams.txt in table.txt} +(dataset: http://borel.slu.edu/crubadan/) + +in zgradi tabelo verjetnosti za multinomski naivn bayes + +language prior je ocenjen iz sum(trigrams|lang) / sum(trigrams), namesto iz razmerja dokumentov (ker ga nimamo:) + +tabela je redka (5.2 jezika / trigram), zato je nima smisla zgostit z laplace-smoothed verjetnostmi za 0-probability trigrame; + +naj rajs classifier vod evidenco kok 0-prob trigramov je bilo pri vsakem jeziku, in to v enmu kosu na konc uposteva. + +ostale stevilke pa so laplace-smoothed. 
+ +output: pickled tuple: + - tgmid: { tgm-text: ID_tgm } + - langid: { lang_altcode: (ID_lang, lang_isocode, langname) } + - langmap: { ID_lang: (lang_isocode, lang_altcode, langname) } + - Mp: numpy.array(dtype=float64): stolpci: jeziki, vrstice: trigrami, log probability, laplace smoothed + - Mip: numpy.array(dtype=int32): isto kot Mp, ampak Mip[x,y]=1 <=> Mp[x,y] > 0 + - P: prior log_e verjetnosti jezikov + - U: log_e verjetnost neznanega trigrama za posamezne jezike + +2012-02-17: + * v excluded_langs je spisek jezikov, ki jih ignoriramo (za 'enz' :) + * dataset nalozimo iz vec direktorijev (z enako strukturo); lang_alt kode se ne smejo prekrivat +""" + +import sys, os +from collections import defaultdict +import scipy.sparse +import scipy as sp +import numpy as np +import math +import pickle + +excluded_langs = {'enz'} + +def read_lang_table(d): + langs = [(d,) + tuple(x.split(None, 2)) for x in open(d + '/table.txt').readlines()] + return langs # array tuplov (dir, internal code, iso-639-3, name) + +def load_lang(d, lc): + f = open(d + '/' + lc + '-3grams.txt') + tgt = [x.split() for x in f.readlines()] + tgp = [(tgm.decode('utf-8'), int(c)) for c,tgm in tgt] + return dict(tgp) + +def transform(ds, dsum, tsum): + "ds je cel dataset, dsum je summary dict (tgm:count), tsum pa vsota vseh tgmjev v korpusu" + nsum = len(dsum) + print "%d trigrams, %d unique -- wtf ?" % (tsum, nsum) + + tgmid = {t:i for i,t in enumerate(dsum)} + print len(tgmid) + print "dataset: tgms =", len(tgmid), "ds =", len(ds) + + M = scipy.sparse.lil_matrix((len(tgmid), len(ds)), dtype=scipy.float64) # stolpci: jeziki, vrstice: trigrami, log probability, laplace smoothed + Mi = scipy.sparse.lil_matrix((len(tgmid), len(ds)), dtype=scipy.int32) # 'bool' prisotnosti + P = np.zeros((1, len(ds)), dtype=np.float64) # prior log prob, base e + U = np.zeros((1, len(ds)), dtype=np.float64) # unknown trigram logprob + + langid, langmap = {}, {} + + for i,(c,iso,n,d) in enumerate(ds): + print iso, '-', n + sum_tgms_lang = float(sum(d.viewvalues())) + P[0,i] = math.log(sum_tgms_lang / tsum) + U[0,i] = math.log(1.0 / (sum_tgms_lang + len(dsum))) + for tgm, count in d.viewitems(): + M[tgmid[tgm], i] = math.log((count + 1.0) / (sum_tgms_lang + len(dsum))) + Mi[tgmid[tgm], i] = 1 + langid[c] = (i,iso,n) + langmap[i] = (iso,c,n) + + Mp = scipy.sparse.csr_matrix(M) + Mip = scipy.sparse.csr_matrix(Mi) + D = (tgmid, langid, langmap, Mp, Mip, P, U) + pickle.dump(D, open('langid_nb.pck', 'w')) + +def main(dirs): + lts = [read_lang_table(d) for d in dirs] + print "found %s languages" % (', '.join([str(len(lt)) for lt in lts]),) + + ds = [] + dsum = defaultdict(int) + + for xd, lc, liso, lname in sum(lts,[]): + if lc in excluded_langs: continue + ld = load_lang(xd, lc) + ds.append((lc,liso,lname.strip(),ld)) + for tgm in ld: + dsum[tgm] += ld[tgm] + + transform(ds, dsum, sum(dsum.viewvalues())) + + print "done" + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/langdet/scriptdet.py b/langdet/scriptdet.py new file mode 100755 index 0000000..58739fa --- /dev/null +++ b/langdet/scriptdet.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python + +# Scripts.txt comes from http://unicode.org/Public/UNIDATA/Scripts.txt + +import re +import bisect +import collections +import sys + +def load_scripts(): + d = [((None,None),None)] + + r = re.compile(r"([^; ]*)[ ]*; ([^# ]*) (#.*)?") + for line in open('Scripts.txt'): + m = r.match(line) + if m: + intervals,script = m.groups()[:2] + if '.' 
in intervals: interval = intervals.split('..') + else: interval = (intervals, intervals) + interval = tuple(map(lambda x: int(x, 16), interval)) + if (interval[0]-1) == d[-1][0][1] and script == d[-1][1]: + d[-1] = ((d[-1][0][0], interval[1]), d[-1][1]) + else: + d.append((interval,script)) + sd = sorted(d[1:]) + interval_start = [x[0][0] for x in sd] + return sd, interval_start + +def find_script(scripts, script_beg, c): + idx = bisect.bisect(script_beg, ord(c)) - 1 + interval = scripts[idx][0] + if interval[0] <= ord(c) <= interval[1]: + return scripts[idx][1] + else: + return '#unknown#' + +def scriptdet(scripts, script_beg, s): + hist = collections.defaultdict(int) + for ch in s: + script = find_script(scripts, script_beg, ch) + if script: + hist[script] += 1 + return hist + +def main(): + ints, intss = load_scripts() + print len(ints), "script intervals" + + print dict(scriptdet(ints, intss, sys.stdin.read().decode('utf-8'))) + +if __name__ == '__main__': + main() diff --git a/langdet/tsv b/langdet/tsv new file mode 100644 index 0000000..1a3596a --- /dev/null +++ b/langdet/tsv @@ -0,0 +1,436 @@ +aa aar aar Afar +ab abk abk Abkhazian + ace ace Achinese + ach ach Acoli + ada ada Adangme + afa afa Afro-Asiatic (Other) + afh afh Afrihili +af afr afr Afrikaans + aka aka Akan + akk akk Akkadian + ale ale Aleut + alg alg Algonquian languages +am amh amh Amharic + ang ang English, Old (ca. 450-1100) + apa apa Apache languages +ar ara ara Arabic + arc arc Aramaic + arn arn Araucanian + arp arp Arapaho + art art Artificial (Other) + arw arw Arawak +as asm asm Assamese + ath ath Athapascan languages + aus aus Australian languages + ava ava Avaric +ae ave ave Avestan + awa awa Awadhi +ay aym aym Aymara +az aze aze Azerbaijani + bad bad Banda + bai bai Bamileke languages +ba bak bak Bashkir + bal bal Baluchi + bam bam Bambara + ban ban Balinese + bas bas Basa + bat bat Baltic (Other) + bej bej Beja +be bel bel Belarussian [Byelorussian] + bem bem Bemba +bn ben ben Bengali [Bengali, Bangla] + ber ber Berber (Other) + bho bho Bhojpuri +bh bih bih Bihari + bik bik Bikol + bin bin Bini +bi bis bis Bislama + bla bla Siksika + bnt bnt Bantu (Other) +bo bod tib Tibetan +bs bos bos Bosnian + bra bra Braj +br bre bre Breton + btk btk Batak (Indonesia) + bua bua Buriat + bug bug Buginese +bg bul bul Bulgarian + cad cad Caddo + cai cai Central American Indian (Other) + car car Carib +ca cat cat Catalan + cau cau Caucasian (Other) + ceb ceb Cebuano + cel cel Celtic (Other) +cs ces cze Czech +ch cha cha Chamorro + chb chb Chibcha +ce che che Chechen + chg chg Chagatai + chk chk Chuukese + chm chm Mari + chn chn Chinook jargon + cho cho Choctaw + chp chp Chipewyan + chr chr Cherokee +cu chu chu Church Slavic +cv chv chv Chuvash + chy chy Cheyenne + cmc cmc Chamic languages + cop cop Coptic +kw cor cor Cornish +co cos cos Corsican + cpe cpe Creoles and pidgins, English-based (Other) + cpf cpf Creoles and pidgins, French-based (Other) + cpp cpp Creoles and pidgins, Portuguese-based (Other) + cre cre Cree + crp crp Creoles and pidgins (Other) + cus cus Cushitic (Other) +cy cym wel Welsh + dak dak Dakota +da dan dan Danish + day day Dayak + del del Delaware + den den Slave (Athapascan) +de deu ger German + dgr dgr Dogrib + din din Dinka + div div Divehi + doi doi Dogri + dra dra Dravidian (Other) + dua dua Duala + dum dum Dutch, Middle (ca. 
1050-1350) + dyu dyu Dyula +dz dzo dzo Dzongkha [Bhutani] + efi efi Efik + egy egy Egyptian (Ancient) + eka eka Ekajuk +el ell gre Greek, Modern (post 1453) + elx elx Elamite +en eng eng English + enm enm English, Middle (1100-1500) +eo epo epo Esperanto +et est est Estonian +eu eus baq Basque + ewe ewe Ewe + ewo ewo Ewondo + fan fan Fang +fo fao fao Faroese +fa fas per Persian + fat fat Fanti +fj fij fij Fijian [Fiji] +fi fin fin Finnish + fiu fiu Finno-Ugrian (Other) + fon fon Fon +fr fra fre French + frm frm French, Middle (ca. 1400-1600) + fro fro French, Old (842-ca. 1400) +fy fry fry Frisian + ful ful Fulah + fur fur Friulian + gaa gaa Ga + gay gay Gayo + gba gba Gbaya + gem gem Germanic (Other) + gez gez Geez + gil gil Gilbertese +gd gla gla Gaelic (Scots) [Scots Gaelic] +ga gle gle Irish +gl glg glg Gallegan [Galician] +gv glv glv Manx [Manx Gaelic] + gmh gmh German, Middle High (ca. 1050-1500) + goh goh German, Old High (ca. 750-1050) + gon gon Gondi + gor gor Gorontalo + got got Gothic + grb grb Grebo + grc grc Greek, Ancient (to 1453) +gn grn grn Guarani +gu guj guj Gujarati + gwi gwi Gwich'in + hai hai Haida +ha hau hau Hausa + haw haw Hawaiian +he heb heb Hebrew +hz her her Herero + hil hil Hiligaynon + him him Himachali +hi hin hin Hindi + hit hit Hittite + hmn hmn Hmong +ho hmo hmo Hiri Motu +hr hrv scr Croatian +hu hun hun Hungarian + hup hup Hupa +hy hye arm Armenian + iba iba Iban + ibo ibo Igbo + ijo ijo Ijo +iu iku iku Inuktitut +ie ile ile Interlingue + ilo ilo Iloko +ia ina ina Interlingua (International Auxiliary Language Association) + inc inc Indic (Other) +id ind ind Indonesian + ine ine Indo-European (Other) +ik ipk ipk Inupiaq [Inupiak] + ira ira Iranian (Other) + iro iro Iroquoian languages +is isl ice Icelandic +it ita ita Italian +jw jaw jav Javanese +ja jpn jpn Japanese + jpr jpr Judeo-Persian + jrb jrb Judeo-Arabic + kaa kaa Kara-Kalpak + kab kab Kabyle + kac kac Kachin +kl kal kal Kalaallisut [Greenlandic] + kam kam Kamba +kn kan kan Kannada + kar kar Karen +ks kas kas Kashmiri +ka kat geo Georgian + kau kau Kanuri + kaw kaw Kawi +kk kaz kaz Kazakh + kha kha Khasi + khi khi Khoisan (Other) +km khm khm Khmer [Cambodian] + kho kho Khotanese +ki kik kik Kikuyu +rw kin kin Kinyarwanda +ky kir kir Kirghiz + kmb kmb Kimbundu + kok kok Konkani +kv kom kom Komi + kon kon Kongo +ko kor kor Korean + kos kos Kosraean + kpe kpe Kpelle + kro kro Kru + kru kru Kurukh +kj kua kua Kuanyama + kum kum Kumyk +ku kur kur Kurdish + kut kut Kutenai + lad lad Ladino + lah lah Lahnda + lam lam Lamba +lo lao lao Lao [Laothian] +la lat lat Latin +lv lav lav Latvian [Latvian, Lettish] + lez lez Lezghian +ln lin lin Lingala +lt lit lit Lithuanian + lol lol Mongo + loz loz Lozi +lb ltz ltz Letzeburgesch [Luxembourgish] + lua lua Luba-Lulua + lub lub Luba-Katanga + lug lug Ganda + lui lui Luiseno + lun lun Lunda + luo luo Luo (Kenya and Tanzania) + lus lus Lushai + mad mad Madurese + mag mag Magahi +mh mah mah Marshall + mai mai Maithili + mak mak Makasar +ml mal mal Malayalam + man man Mandingo + map map Austronesian (Other) +mr mar mar Marathi + mas mas Masai + mdr mdr Mandar + men men Mende + mga mga Irish, Middle (900-1200) + mic mic Micmac + min min Minangkabau + mis mis Miscellaneous languages +mk mkd mac Macedonian + mkh mkh Mon-Khmer (Other) +mg mlg mlg Malagasy +mt mlt mlt Maltese + mnc mnc Manchu + mni mni Manipuri + mno mno Manobo languages + moh moh Mohawk +mo mol mol Moldavian +mn mon mon Mongolian + mos mos Mossi +mi mri mao Maori +ms msa may Malay + mul mul Multiple 
languages + mun mun Munda languages + mus mus Creek + mwr mwr Marwari +my mya bur Burmese + myn myn Mayan languages + nah nah Nahuatl + nai nai North American Indian (Other) +na nau nau Nauru +nv nav nav Navajo +nr nbl nbl Ndebele, South +nd nde nde Ndebele, North +ng ndo ndo Ndonga +ne nep nep Nepali + new new Newari + nia nia Nias + nic nic Niger-Kordofanian (Other) + niu niu Niuean +nl nld dut Dutch +nn nno nno Norwegian Nynorsk +nb nob nob Norwegian Bokmål + non non Norse, Old +no nor nor Norwegian + nso nso Sotho, Northern + nub nub Nubian languages +ny nya nya Chichewa; Nyanja + nym nym Nyamwezi + nyn nyn Nyankole + nyo nyo Nyoro + nzi nzi Nzima +oc oci oci Occitan (post 1500); Provençal + oji oji Ojibwa +or ori ori Oriya +om orm orm Oromo [(Afan) Oromo] + osa osa Osage +os oss oss Ossetian; Ossetic + ota ota Turkish, Ottoman (1500-1928) + oto oto Otomian languages + paa paa Papuan (Other) + pag pag Pangasinan + pal pal Pahlavi + pam pam Pampanga +pa pan pan Panjabi [Punjabi] + pap pap Papiamento + pau pau Palauan + peo peo Persian, Old (ca. 600-400 B.C.) + phi phi Philippine (Other) + phn phn Phoenician +pi pli pli Pali +pl pol pol Polish + pon pon Pohnpeian +pt por por Portuguese + pra pra Prakrit languages + pro pro ProvenÁal, Old (to 1500) +ps pus pus Pushto [Pashto, Pushto] +qu que que Quechua + raj raj Rajasthani + rap rap Rapanui + rar rar Rarotongan + roa roa Romance (Other) +rm roh roh Raeto-Romance, [Rhaeto-Romance] + rom rom Romany +ro ron rum Romanian +rn run run Rundi +ru rus rus Russian + sad sad Sandawe +sg sag sag Sango [Sangho] + sah sah Yakut + sai sai South American Indian (Other) + sal sal Salishan languages + sam sam Samaritan Aramaic +sa san san Sanskrit + sas sas Sasak + sat sat Santali + sco sco Scots + sel sel Selkup + sem sem Semitic (Other) + sga sga Irish, Old (to 900) + sgn sgn Sign languages + shn shn Shan + sid sid Sidamo +si sin sin Sinhalese [Singhalese] + sio sio Siouan languages + sit sit Sino-Tibetan (Other) + sla sla Slavic (Other) +sk slk slo Slovak +sl slv slv Slovenian +se sme sme Northern Sami + smi smi Sami languages Other +sm smo smo Samoan +sn sna sna Shona +sd snd snd Sindhi + snk snk Soninke + sog sog Sogdian +so som som Somali + son son Songhai +st sot sot Sotho, Southern [Sesotho] +es spa spa Spanish +sq sqi alb Albanian +sc srd srd Sardinian +sr srp scc Serbian + srr srr Serer + ssa ssa Nilo-Saharan (Other) +ss ssw ssw Swati + suk suk Sukuma +su sun sun Sundanese + sus sus Susu + sux sux Sumerian +sw swa swa Swahili +sv swe swe Swedish + syr syr Syriac +ty tah tah Tahitian + tai tai Tai (Other) +ta tam tam Tamil +tt tat tat Tatar +te tel tel Telugu + tem tem Timne + ter ter Tereno + tet tet Tetum +tg tgk tgk Tajik +tl tgl tgl Tagalog +th tha tha Thai + tig tig Tigre + tir tir Tigrinya + tiv tiv Tiv + tkl tkl Tokelau + tli tli Tlingit + tmh tmh Tamashek + tog tog Tonga (Nyasa) + ton ton Tonga (Tonga Islands) + tpi tpi Tok Pisin + tsi tsi Tsimshian +tn tsn tsn Tswana +ts tso tso Tsonga +tk tuk tuk Turkmen + tum tum Tumbuka +tr tur tur Turkish + tut tut Altaic (Other) + tvl tvl Tuvalu +tw twi twi Twi + tyv tyv Tuvinian + uga uga Ugaritic +ug uig uig Uighur +uk ukr ukr Ukrainian + umb umb Umbundu + und und Undetermined +ur urd urd Urdu +uz uzb uzb Uzbek + vai vai Vai + ven ven Venda +vi vie vie Vietnamese +vo vol vol Volap¸k + vot vot Votic + wak wak Wakashan languages + wal wal Walamo + war war Waray + was was Washo + wen wen Sorbian languages +wo wol wol Wolof +xh xho xho Xhosa + yao yao Yao + yap yap Yapese +yi yid yid Yiddish + yor yor 
Yoruba + ypk ypk Yupik languages + zap zap Zapotec + zen zen Zenaga +za zha zha Zhuang +zh zho chi Chinese + znd znd Zande +zu zul zul Zulu + zun zun Zuni diff --git a/public_html/down-arrow.png b/public_html/down-arrow.png new file mode 100644 index 0000000..40faf18 Binary files /dev/null and b/public_html/down-arrow.png differ diff --git a/public_html/index.html b/public_html/index.html new file mode 100644 index 0000000..08ffe27 --- /dev/null +++ b/public_html/index.html @@ -0,0 +1,156 @@ + + + + + + + + + + + +

+IJS newsfeed +
a clean, continuous, real-time aggregated stream of semantically enriched news articles from RSS-enabled sites across the world.
+

+ +

What it Does

+

The pipeline performs the following main steps:

+
  1. Periodically crawl a list of RSS feeds and a subset of Google News and obtain links to news articles +
  2. Download the articles, taking care not to overload any of the hosting servers +
  3. Parse each article to obtain +
    1. Potential new RSS sources mentioned in the HTML, to be used in step (1); a short feed-discovery sketch follows this list +
    2. Cleartext version of the article body +
    +
  4. Process articles with Enrycher (English and Slovene only) +
  5. Expose two streams of news articles (cleartext and Enrycher-processed) to end users. +
+ +
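The feed discovery in step 3.1 amounts to scanning the downloaded article HTML for feed autodiscovery links. A minimal sketch of the idea, assuming lxml (already used elsewhere in this repository) and a hypothetical helper name; this is illustrative only, not the pipeline's actual parser:

import lxml.html

def find_feed_links(html, base_url):
    # Illustrative only: collect <link rel="alternate"> elements advertising RSS/Atom feeds.
    doc = lxml.html.fromstring(html)
    doc.make_links_absolute(base_url)          # resolve relative hrefs against the article URL
    feeds = []
    for link in doc.iter('link'):
        rel = (link.get('rel') or '').lower()
        typ = (link.get('type') or '').lower()
        if rel == 'alternate' and ('rss' in typ or 'atom' in typ):
            feeds.append(link.get('href'))
    return feeds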

Demo Visualization

+

Visit http://newsfeed.ijs.si/visual_demo/ for a real-time visualization of the news stream.

+ +

Accessing the stream

+

We currently offer two streams: +

    +
  • all the articles, cleaned down to cleartext of article body; and
  • +
  • all English and Slovene articles annotated with categorization and named entities as provided by Enrycher.
+

Each stream of articles gets xml-formatted and segmented by time into .gz files, each a few MB in size. + +

The streams are password-protected but free for research use. To obtain a username, please contact Mitja Trampus and/or Blaz Novak at firstname.lastname@ijs.si. + +

Note: Due to the streaming nature of the pipeline, the articles in the gzipped files are only approximately chronological; +they are sorted in the order in which they were processed rather than published. + +

Downloading the Stream - API

+

The stream is accessible at http://newsfeed.ijs.si/stream/. +The URL accepts an optional ?after=TIMESTAMP parameter, where TIMESTAMP +takes the ISO format yyyy-mm-ddThh:mm:ssZ (Z denotes GMT timezone). The server will return +the oldest gzip created later than TIMESTAMP. +

HTTP headers (Content-disposition: attachment; filename="...") will contain the new gzip's filename which +you can use to generate the next query, and so on. If the after parameter is too recent (no newer gzips available), +HTTP 404 is returned. If no after is provided, the oldest available gzip is returned; we will attempt +to maintain a month's worth of backlog. + +
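For reference, the polling loop can be written in a few lines directly against this API. A minimal sketch, assuming the requests package and placeholder credentials; fetch_next is a hypothetical helper, not the official client (see the script in the next section):

import requests

STREAM_URL = 'http://newsfeed.ijs.si/stream/'
AUTH = ('username', 'password')    # placeholder; request real credentials as described above

def fetch_next(after):
    # Ask for the oldest gzip newer than `after` (format yyyy-mm-ddThh:mm:ssZ).
    resp = requests.get(STREAM_URL, params={'after': after}, auth=AUTH)
    if resp.status_code == 404:
        return None                # nothing newer than `after` is available yet
    resp.raise_for_status()
    # Content-disposition carries the server-side filename of the returned gzip.
    disposition = resp.headers.get('Content-Disposition', '')
    filename = disposition.split('filename="')[-1].rstrip('"') or 'stream.gz'
    with open(filename, 'wb') as f:
        f.write(resp.content)
    return filename                # derive the next ?after value from this name

print fetch_next('2013-01-01T00:00:00Z')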

Downloading the Stream - Comfortably

+

You can also download our python script which uses the simple API +described above to poll the server at one-minute intervals and copies new .gz files to the local disk as they +are made available on the server. +

A sample call for downloading the public stream (the default) into folder ijs_news: +

./http2fs.py -o ijs_news
+

 This will download the whole available history and continue to follow the real-time stream. To follow the X-Like stream (only available to X-Like project partners), we provide an additional stream URL besides the default one:

./http2fs.py -o ijs_news -f http://newsfeed.ijs.si/stream/ -f http://newsfeed.ijs.si/stream/xlike
+ +

Stream Contents and Format

+

To have a quick look at the real stream data, click either of the links in the API description above. +

Each .gz file contains a single XML tree. The root element, <article-set>, contains zero or more articles in the following format: +

+<article id="internal article ID; consistent across streams"> 
+   <source>
+      <uri> URL from which the article was discovered; typically the RSS feed </uri> 
+      <title> Title for the source </title> 
+      <type> MAINSTREAM_NEWS </type> 
+      <location?>
+         <longitude> publisher longitude in degrees </longitude>
+         <latitude> publisher latitude in degrees </latitude>
+         <city?> publisher city </city>
+         <country?> publisher country </country>
+      </location> 
+      <tags?> RSS-provided tags; the tag vocabulary is not controlled 
+         <tag> some_tag </tag>
+         <tag> another_tag </tag>
+      </tags>
+   </source>
+   <uri> URL from which the article was downloaded </uri>
+   <publish-date?> The publication time and date.</publish-date>
+   <retrieve-date> The retrieval time and date.</retrieve-date>
+   <lang> 3-letter ISO 639-2 language code </lang> 
+   <location?>
+      <longitude> story content longitude in degrees </longitude>
+      <latitude> story content latitude in degrees </latitude>
+      <city?> story city </city>
+      <country?> story country </country>
+   </location> 
+   <tags?> RSS-provided tags; the tag vocabulary is not controlled 
+      <tag> some_tag </tag>
+      <tag> another_tag </tag>
+   </tags>
+   <img?> The URL of a related image, usually a thumbnail. </img> 
+   <title> Title. Can be empty if we fail to identify it. </title> 
+   <body-cleartext>
+       Clear text body of the article, formatted only with <p> tags
+   </body-cleartext> 
+   <body-rych?; only English, Slovene>
+       Enriched article body; an XML subtree as returned by Enrycher.
+   </body-rych> 
+   <body-xlike?; only English, Spanish, Catalan>
+       Enriched article body; an XML subtree as returned by iSOCO; experimental.
+   </body-xlike> 
+</article>
+
+ +

Elements marked with ? are omitted if the data is missing. +

All times are in UTC and take the format yyyy-mm-ddThh:mm:ssZ. + +
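To consume the format programmatically, something along these lines is enough; a minimal sketch assuming lxml (already used elsewhere in this repository) and a placeholder filename, using only the elements documented above:

import gzip
from lxml import etree

def iter_articles(path):
    # Parse one downloaded .gz file and yield (id, lang, title, cleartext body).
    f = gzip.open(path, 'rb')
    root = etree.parse(f).getroot()            # the <article-set> element
    f.close()
    for article in root.findall('article'):
        body_el = article.find('body-cleartext')
        body = ''.join(body_el.itertext()) if body_el is not None else ''
        yield (article.get('id'),
               article.findtext('lang', default='').strip(),
               article.findtext('title', default='').strip(),
               body)

for aid, lang, title, body in iter_articles('news-batch.gz'):
    print aid, lang, len(body), title.encode('utf8')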

Querying the stream

+

 The contents of the Enrycher stream are indexed and can be queried using a web service. The service is +accessible at /query/news-search (JSON output) or /query/news-search-xml (XML output), and returns the latest (by time of arrival) 100 articles. The service accepts any combination of the following parameters:

+
    +
  • q=slovenia hockey — retrieve all articles with the words "slovenia" and "hockey" in the title or the body
  • +
  • qt=slovenia hockey — retrieve all articles with "slovenia" and "hockey" in the title
  • +
  • qb=slovenia hockey — retrieve all articles with "slovenia" and "hockey" in the body
  • +
  • cu=http://en.wikipedia.org/wiki/Slovenia — retrieve all articles annotated with the "Slovenia" Wikipedia page
  • +
  • cl=Slovenia — retrieve all articles annotated with the "Slovenia" named entity
  • +
  • lang=eng — retrieve all English articles
  • +
  • date=2012-06-06 — retrieve all articles published on 6 June 2012
  • +
  • offset=3 — used to retrieve more than the first 100 articles; for example, offset 3 returns articles from 301 to 400
  • +
+

Example 1: /query/news-search?q=slovenia&lang=eng&date=2012-06-06

+

Example 2: /query/news-search-xml?q=slovenia&lang=eng&date=2012-06-06

+

 Example 3: /query/news-search?cu=http://en.wikipedia.org/wiki/Slovenia&lang=eng&date=2012-06-06

+ +
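The same queries can be issued from code as well. A small sketch, assuming the requests package and that the JSON endpoint returns a list of articles (the exact payload structure is not documented here):

import requests

SEARCH_URL = 'http://newsfeed.ijs.si/query/news-search'
AUTH = ('username', 'password')    # placeholder credentials, if required

def search(**params):
    resp = requests.get(SEARCH_URL, params=params, auth=AUTH)
    resp.raise_for_status()
    return resp.json()

first_100 = search(q='slovenia', lang='eng', date='2012-06-06')
next_100 = search(q='slovenia', lang='eng', date='2012-06-06', offset=1)   # presumably articles 101-200
print len(first_100), len(next_100)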

Try it

+

Try querying the pipeline and browsing retrieved articles using newsfeed search.

+ +

About

+

The pipeline has been developed and is being maintained by the Artificial Intelligence Laboratory at Jozef Stefan Institute, Slovenia. In case of questions, contact Mitja Trampus and/or Blaz Novak at firstname.lastname@ijs.si. +

Referencing: If you use newsfeed data, please reference it with the following paper: +

Trampus, Mitja and Novak, Blaz: The Internals Of An Aggregated Web News Feed. Proceedings of 15th Multiconference on Information Society 2012 (IS-2012). [PDF]
+

The development was supported in part by the RENDER, X-Like, PlanetData and MetaNet EU FP7 projects. + + + + diff --git a/public_html/search_demo/analytics.css b/public_html/search_demo/analytics.css new file mode 100644 index 0000000..fb67411 --- /dev/null +++ b/public_html/search_demo/analytics.css @@ -0,0 +1,104 @@ +@import url(http://fonts.googleapis.com/css?family=Karla); +@import url(http://fonts.googleapis.com/css?family=Montserrat); + +body, html { + background: #EEE; + margin:0; + padding:0; + + font-size: 16px; + font-family: 'Karla', sans-serif; +} + +.header { + left: 0px; + top: 0px; + right: 0px; + padding: 15px; + + background: #29527A; + + color: white; +} + +.title { + margin: 0px; + font-size: 36px; + font-family: 'Montserrat', sans-serif; +} + +.navigation { + left: 0px; + right: 0px; + padding: 15px; + + background: #29527A; + box-shadow: 0px 3px 5px #222; + + font-size: 24px; + color: white; +} + +.navigation a:link, .navigation a:visited { + color: white; + text-decoration: none; +} + +a:link, a:visited { + color: #29527A; + text-decoration: none; +} + +.dashboard { + float: left; + width: 800px; + padding: 15px; + margin: 15px; + + background: #CCC; + box-shadow: 3px 3px 5px #222; + + color: black; +} + +.dashboard_title { + margin: 10px 0px 10px; + font-size: 24px; +} + +.dashboard ul { + margin: 0px, 20px; +} + +.dashboard li { + margin: 5px +} + +/* login pages */ +.user_settings { + position: fixed; + top: 10px; + right: 10px; +} + +/* login page */ +.login_box { + font-family: Verdana, sans-serif; + background: #29527A; + + position: fixed; + bottom: 100px; + right: 100px; + + padding: 15px; + + font-size: 15px; + text-align: right; + color: white; + box-shadow: 3px 3px 5px #222; +} + +.login_box_body { + padding: 0px 0px 10px 0px; +} + diff --git a/public_html/search_demo/index.html b/public_html/search_demo/index.html new file mode 100644 index 0000000..b63d360 --- /dev/null +++ b/public_html/search_demo/index.html @@ -0,0 +1,50 @@ + + + + + + + + + + Newsfeed Search demo + + +

Newsfeed Search demo
+ +
+
Query
+
+
+ +
+
+ + + + \ No newline at end of file diff --git a/public_html/search_demo/jquery.js b/public_html/search_demo/jquery.js new file mode 100644 index 0000000..6ee5612 --- /dev/null +++ b/public_html/search_demo/jquery.js @@ -0,0 +1,7 @@ +/*! jQuery v1.7.1 jquery.com | jquery.org/license */ +(function (a, b) { + function cy(a) { return f.isWindow(a) ? a : a.nodeType === 9 ? a.defaultView || a.parentWindow : !1 } function cv(a) { if (!ck[a]) { var b = c.body, d = f("<" + a + ">").appendTo(b), e = d.css("display"); d.remove(); if (e === "none" || e === "") { cl || (cl = c.createElement("iframe"), cl.frameBorder = cl.width = cl.height = 0), b.appendChild(cl); if (!cm || !cl.createElement) cm = (cl.contentWindow || cl.contentDocument).document, cm.write((c.compatMode === "CSS1Compat" ? "" : "") + ""), cm.close(); d = cm.createElement(a), cm.body.appendChild(d), e = f.css(d, "display"), b.removeChild(cl) } ck[a] = e } return ck[a] } function cu(a, b) { var c = {}; f.each(cq.concat.apply([], cq.slice(0, b)), function () { c[this] = a }); return c } function ct() { cr = b } function cs() { setTimeout(ct, 0); return cr = f.now() } function cj() { try { return new a.ActiveXObject("Microsoft.XMLHTTP") } catch (b) { } } function ci() { try { return new a.XMLHttpRequest } catch (b) { } } function cc(a, c) { a.dataFilter && (c = a.dataFilter(c, a.dataType)); var d = a.dataTypes, e = {}, g, h, i = d.length, j, k = d[0], l, m, n, o, p; for (g = 1; g < i; g++) { if (g === 1) for (h in a.converters) typeof h == "string" && (e[h.toLowerCase()] = a.converters[h]); l = k, k = d[g]; if (k === "*") k = l; else if (l !== "*" && l !== k) { m = l + " " + k, n = e[m] || e["* " + k]; if (!n) { p = b; for (o in e) { j = o.split(" "); if (j[0] === l || j[0] === "*") { p = e[j[1] + " " + k]; if (p) { o = e[o], o === !0 ? n = p : p === !0 && (n = o); break } } } } !n && !p && f.error("No conversion from " + m.replace(" ", " to ")), n !== !0 && (c = n ? n(c) : p(o(c))) } } return c } function cb(a, c, d) { var e = a.contents, f = a.dataTypes, g = a.responseFields, h, i, j, k; for (i in g) i in d && (c[g[i]] = d[i]); while (f[0] === "*") f.shift(), h === b && (h = a.mimeType || c.getResponseHeader("content-type")); if (h) for (i in e) if (e[i] && e[i].test(h)) { f.unshift(i); break } if (f[0] in d) j = f[0]; else { for (i in d) { if (!f[0] || a.converters[i + " " + f[0]]) { j = i; break } k || (k = i) } j = j || k } if (j) { j !== f[0] && f.unshift(j); return d[j] } } function ca(a, b, c, d) { if (f.isArray(b)) f.each(b, function (b, e) { c || bE.test(a) ? d(a, e) : ca(a + "[" + (typeof e == "object" || f.isArray(e) ? b : "") + "]", e, c, d) }); else if (!c && b != null && typeof b == "object") for (var e in b) ca(a + "[" + e + "]", b[e], c, d); else d(a, b) } function b_(a, c) { var d, e, g = f.ajaxSettings.flatOptions || {}; for (d in c) c[d] !== b && ((g[d] ? a : e || (e = {}))[d] = c[d]); e && f.extend(!0, a, e) } function b$(a, c, d, e, f, g) { f = f || c.dataTypes[0], g = g || {}, g[f] = !0; var h = a[f], i = 0, j = h ? h.length : 0, k = a === bT, l; for (; i < j && (k || !l); i++) l = h[i](c, d, e), typeof l == "string" && (!k || g[l] ? 
l = b : (c.dataTypes.unshift(l), l = b$(a, c, d, e, l, g))); (k || !l) && !g["*"] && (l = b$(a, c, d, e, "*", g)); return l } function bZ(a) { return function (b, c) { typeof b != "string" && (c = b, b = "*"); if (f.isFunction(c)) { var d = b.toLowerCase().split(bP), e = 0, g = d.length, h, i, j; for (; e < g; e++) h = d[e], j = /^\+/.test(h), j && (h = h.substr(1) || "*"), i = a[h] = a[h] || [], i[j ? "unshift" : "push"](c) } } } function bC(a, b, c) { var d = b === "width" ? a.offsetWidth : a.offsetHeight, e = b === "width" ? bx : by, g = 0, h = e.length; if (d > 0) { if (c !== "border") for (; g < h; g++) c || (d -= parseFloat(f.css(a, "padding" + e[g])) || 0), c === "margin" ? d += parseFloat(f.css(a, c + e[g])) || 0 : d -= parseFloat(f.css(a, "border" + e[g] + "Width")) || 0; return d + "px" } d = bz(a, b, b); if (d < 0 || d == null) d = a.style[b] || 0; d = parseFloat(d) || 0; if (c) for (; g < h; g++) d += parseFloat(f.css(a, "padding" + e[g])) || 0, c !== "padding" && (d += parseFloat(f.css(a, "border" + e[g] + "Width")) || 0), c === "margin" && (d += parseFloat(f.css(a, c + e[g])) || 0); return d + "px" } function bp(a, b) { b.src ? f.ajax({ url: b.src, async: !1, dataType: "script" }) : f.globalEval((b.text || b.textContent || b.innerHTML || "").replace(bf, "/*$0*/")), b.parentNode && b.parentNode.removeChild(b) } function bo(a) { var b = c.createElement("div"); bh.appendChild(b), b.innerHTML = a.outerHTML; return b.firstChild } function bn(a) { var b = (a.nodeName || "").toLowerCase(); b === "input" ? bm(a) : b !== "script" && typeof a.getElementsByTagName != "undefined" && f.grep(a.getElementsByTagName("input"), bm) } function bm(a) { if (a.type === "checkbox" || a.type === "radio") a.defaultChecked = a.checked } function bl(a) { return typeof a.getElementsByTagName != "undefined" ? a.getElementsByTagName("*") : typeof a.querySelectorAll != "undefined" ? a.querySelectorAll("*") : [] } function bk(a, b) { var c; if (b.nodeType === 1) { b.clearAttributes && b.clearAttributes(), b.mergeAttributes && b.mergeAttributes(a), c = b.nodeName.toLowerCase(); if (c === "object") b.outerHTML = a.outerHTML; else if (c !== "input" || a.type !== "checkbox" && a.type !== "radio") { if (c === "option") b.selected = a.defaultSelected; else if (c === "input" || c === "textarea") b.defaultValue = a.defaultValue } else a.checked && (b.defaultChecked = b.checked = a.checked), b.value !== a.value && (b.value = a.value); b.removeAttribute(f.expando) } } function bj(a, b) { if (b.nodeType === 1 && !!f.hasData(a)) { var c, d, e, g = f._data(a), h = f._data(b, g), i = g.events; if (i) { delete h.handle, h.events = {}; for (c in i) for (d = 0, e = i[c].length; d < e; d++) f.event.add(b, c + (i[c][d].namespace ? "." : "") + i[c][d].namespace, i[c][d], i[c][d].data) } h.data && (h.data = f.extend({}, h.data)) } } function bi(a, b) { return f.nodeName(a, "table") ? 
a.getElementsByTagName("tbody")[0] || a.appendChild(a.ownerDocument.createElement("tbody")) : a } function U(a) { var b = V.split("|"), c = a.createDocumentFragment(); if (c.createElement) while (b.length) c.createElement(b.pop()); return c } function T(a, b, c) { b = b || 0; if (f.isFunction(b)) return f.grep(a, function (a, d) { var e = !!b.call(a, d, a); return e === c }); if (b.nodeType) return f.grep(a, function (a, d) { return a === b === c }); if (typeof b == "string") { var d = f.grep(a, function (a) { return a.nodeType === 1 }); if (O.test(b)) return f.filter(b, d, !c); b = f.filter(b, d) } return f.grep(a, function (a, d) { return f.inArray(a, b) >= 0 === c }) } function S(a) { return !a || !a.parentNode || a.parentNode.nodeType === 11 } function K() { return !0 } function J() { return !1 } function n(a, b, c) { var d = b + "defer", e = b + "queue", g = b + "mark", h = f._data(a, d); h && (c === "queue" || !f._data(a, e)) && (c === "mark" || !f._data(a, g)) && setTimeout(function () { !f._data(a, e) && !f._data(a, g) && (f.removeData(a, d, !0), h.fire()) }, 0) } function m(a) { for (var b in a) { if (b === "data" && f.isEmptyObject(a[b])) continue; if (b !== "toJSON") return !1 } return !0 } function l(a, c, d) { if (d === b && a.nodeType === 1) { var e = "data-" + c.replace(k, "-$1").toLowerCase(); d = a.getAttribute(e); if (typeof d == "string") { try { d = d === "true" ? !0 : d === "false" ? !1 : d === "null" ? null : f.isNumeric(d) ? parseFloat(d) : j.test(d) ? f.parseJSON(d) : d } catch (g) { } f.data(a, c, d) } else d = b } return d } function h(a) { var b = g[a] = {}, c, d; a = a.split(/\s+/); for (c = 0, d = a.length; c < d; c++) b[a[c]] = !0; return b } var c = a.document, d = a.navigator, e = a.location, f = function () { function J() { if (!e.isReady) { try { c.documentElement.doScroll("left") } catch (a) { setTimeout(J, 1); return } e.ready() } } var e = function (a, b) { return new e.fn.init(a, b, h) }, f = a.jQuery, g = a.$, h, i = /^(?:[^#<]*(<[\w\W]+>)[^>]*$|#([\w\-]*)$)/, j = /\S/, k = /^\s+/, l = /\s+$/, m = /^<(\w+)\s*\/?>(?:<\/\1>)?$/, n = /^[\],:{}\s]*$/, o = /\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, p = /"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, q = /(?:^|:|,)(?:\s*\[)+/g, r = /(webkit)[ \/]([\w.]+)/, s = /(opera)(?:.*version)?[ \/]([\w.]+)/, t = /(msie) ([\w.]+)/, u = /(mozilla)(?:.*? rv:([\w.]+))?/, v = /-([a-z]|[0-9])/ig, w = /^-ms-/, x = function (a, b) { return (b + "").toUpperCase() }, y = d.userAgent, z, A, B, C = Object.prototype.toString, D = Object.prototype.hasOwnProperty, E = Array.prototype.push, F = Array.prototype.slice, G = String.prototype.trim, H = Array.prototype.indexOf, I = {}; e.fn = e.prototype = { constructor: e, init: function (a, d, f) { var g, h, j, k; if (!a) return this; if (a.nodeType) { this.context = this[0] = a, this.length = 1; return this } if (a === "body" && !d && c.body) { this.context = c, this[0] = c.body, this.selector = a, this.length = 1; return this } if (typeof a == "string") { a.charAt(0) !== "<" || a.charAt(a.length - 1) !== ">" || a.length < 3 ? g = i.exec(a) : g = [null, a, null]; if (g && (g[1] || !d)) { if (g[1]) { d = d instanceof e ? d[0] : d, k = d ? d.ownerDocument || d : c, j = m.exec(a), j ? e.isPlainObject(d) ? (a = [c.createElement(j[1])], e.fn.attr.call(a, d, !0)) : a = [k.createElement(j[1])] : (j = e.buildFragment([g[1]], [k]), a = (j.cacheable ? 
e.clone(j.fragment) : j.fragment).childNodes); return e.merge(this, a) } h = c.getElementById(g[2]); if (h && h.parentNode) { if (h.id !== g[2]) return f.find(a); this.length = 1, this[0] = h } this.context = c, this.selector = a; return this } return !d || d.jquery ? (d || f).find(a) : this.constructor(d).find(a) } if (e.isFunction(a)) return f.ready(a); a.selector !== b && (this.selector = a.selector, this.context = a.context); return e.makeArray(a, this) }, selector: "", jquery: "1.7.1", length: 0, size: function () { return this.length }, toArray: function () { return F.call(this, 0) }, get: function (a) { return a == null ? this.toArray() : a < 0 ? this[this.length + a] : this[a] }, pushStack: function (a, b, c) { var d = this.constructor(); e.isArray(a) ? E.apply(d, a) : e.merge(d, a), d.prevObject = this, d.context = this.context, b === "find" ? d.selector = this.selector + (this.selector ? " " : "") + c : b && (d.selector = this.selector + "." + b + "(" + c + ")"); return d }, each: function (a, b) { return e.each(this, a, b) }, ready: function (a) { e.bindReady(), A.add(a); return this }, eq: function (a) { a = +a; return a === -1 ? this.slice(a) : this.slice(a, a + 1) }, first: function () { return this.eq(0) }, last: function () { return this.eq(-1) }, slice: function () { return this.pushStack(F.apply(this, arguments), "slice", F.call(arguments).join(",")) }, map: function (a) { return this.pushStack(e.map(this, function (b, c) { return a.call(b, c, b) })) }, end: function () { return this.prevObject || this.constructor(null) }, push: E, sort: [].sort, splice: [].splice }, e.fn.init.prototype = e.fn, e.extend = e.fn.extend = function () { var a, c, d, f, g, h, i = arguments[0] || {}, j = 1, k = arguments.length, l = !1; typeof i == "boolean" && (l = i, i = arguments[1] || {}, j = 2), typeof i != "object" && !e.isFunction(i) && (i = {}), k === j && (i = this, --j); for (; j < k; j++) if ((a = arguments[j]) != null) for (c in a) { d = i[c], f = a[c]; if (i === f) continue; l && f && (e.isPlainObject(f) || (g = e.isArray(f))) ? (g ? (g = !1, h = d && e.isArray(d) ? d : []) : h = d && e.isPlainObject(d) ? d : {}, i[c] = e.extend(l, h, f)) : f !== b && (i[c] = f) } return i }, e.extend({ noConflict: function (b) { a.$ === e && (a.$ = g), b && a.jQuery === e && (a.jQuery = f); return e }, isReady: !1, readyWait: 1, holdReady: function (a) { a ? e.readyWait++ : e.ready(!0) }, ready: function (a) { if (a === !0 && ! --e.readyWait || a !== !0 && !e.isReady) { if (!c.body) return setTimeout(e.ready, 1); e.isReady = !0; if (a !== !0 && --e.readyWait > 0) return; A.fireWith(c, [e]), e.fn.trigger && e(c).trigger("ready").off("ready") } }, bindReady: function () { if (!A) { A = e.Callbacks("once memory"); if (c.readyState === "complete") return setTimeout(e.ready, 1); if (c.addEventListener) c.addEventListener("DOMContentLoaded", B, !1), a.addEventListener("load", e.ready, !1); else if (c.attachEvent) { c.attachEvent("onreadystatechange", B), a.attachEvent("onload", e.ready); var b = !1; try { b = a.frameElement == null } catch (d) { } c.documentElement.doScroll && b && J() } } }, isFunction: function (a) { return e.type(a) === "function" }, isArray: Array.isArray || function (a) { return e.type(a) === "array" }, isWindow: function (a) { return a && typeof a == "object" && "setInterval" in a }, isNumeric: function (a) { return !isNaN(parseFloat(a)) && isFinite(a) }, type: function (a) { return a == null ? 
+ [minified jQuery 1.7.x library source, vendored unchanged as part of this commit; its HTML string literals were mangled during extraction, so the file body is not reproduced here]
"cssFloat" : "styleFloat" }, style: function (a, c, d, e) { if (!!a && a.nodeType !== 3 && a.nodeType !== 8 && !!a.style) { var g, h, i = f.camelCase(c), j = a.style, k = f.cssHooks[i]; c = f.cssProps[i] || i; if (d === b) { if (k && "get" in k && (g = k.get(a, !1, e)) !== b) return g; return j[c] } h = typeof d, h === "string" && (g = bv.exec(d)) && (d = +(g[1] + 1) * +g[2] + parseFloat(f.css(a, c)), h = "number"); if (d == null || h === "number" && isNaN(d)) return; h === "number" && !f.cssNumber[i] && (d += "px"); if (!k || !("set" in k) || (d = k.set(a, d)) !== b) try { j[c] = d } catch (l) { } } }, css: function (a, c, d) { var e, g; c = f.camelCase(c), g = f.cssHooks[c], c = f.cssProps[c] || c, c === "cssFloat" && (c = "float"); if (g && "get" in g && (e = g.get(a, !0, d)) !== b) return e; if (bz) return bz(a, c) }, swap: function (a, b, c) { var d = {}; for (var e in b) d[e] = a.style[e], a.style[e] = b[e]; c.call(a); for (e in b) a.style[e] = d[e] } }), f.curCSS = f.css, f.each(["height", "width"], function (a, b) { f.cssHooks[b] = { get: function (a, c, d) { var e; if (c) { if (a.offsetWidth !== 0) return bC(a, b, d); f.swap(a, bw, function () { e = bC(a, b, d) }); return e } }, set: function (a, b) { if (!bt.test(b)) return b; b = parseFloat(b); if (b >= 0) return b + "px" } } }), f.support.opacity || (f.cssHooks.opacity = { get: function (a, b) { return br.test((b && a.currentStyle ? a.currentStyle.filter : a.style.filter) || "") ? parseFloat(RegExp.$1) / 100 + "" : b ? "1" : "" }, set: function (a, b) { var c = a.style, d = a.currentStyle, e = f.isNumeric(b) ? "alpha(opacity=" + b * 100 + ")" : "", g = d && d.filter || c.filter || ""; c.zoom = 1; if (b >= 1 && f.trim(g.replace(bq, "")) === "") { c.removeAttribute("filter"); if (d && !d.filter) return } c.filter = bq.test(g) ? g.replace(bq, e) : g + " " + e } }), f(function () { f.support.reliableMarginRight || (f.cssHooks.marginRight = { get: function (a, b) { var c; f.swap(a, { display: "inline-block" }, function () { b ? c = bz(a, "margin-right", "marginRight") : c = a.style.marginRight }); return c } }) }), c.defaultView && c.defaultView.getComputedStyle && (bA = function (a, b) { var c, d, e; b = b.replace(bs, "-$1").toLowerCase(), (d = a.ownerDocument.defaultView) && (e = d.getComputedStyle(a, null)) && (c = e.getPropertyValue(b), c === "" && !f.contains(a.ownerDocument.documentElement, a) && (c = f.style(a, b))); return c }), c.documentElement.currentStyle && (bB = function (a, b) { var c, d, e, f = a.currentStyle && a.currentStyle[b], g = a.style; f === null && g && (e = g[b]) && (f = e), !bt.test(f) && bu.test(f) && (c = g.left, d = a.runtimeStyle && a.runtimeStyle.left, d && (a.runtimeStyle.left = a.currentStyle.left), g.left = b === "fontSize" ? "1em" : f || 0, f = g.pixelLeft + "px", g.left = c, d && (a.runtimeStyle.left = d)); return f === "" ? 
"auto" : f }), bz = bA || bB, f.expr && f.expr.filters && (f.expr.filters.hidden = function (a) { var b = a.offsetWidth, c = a.offsetHeight; return b === 0 && c === 0 || !f.support.reliableHiddenOffsets && (a.style && a.style.display || f.css(a, "display")) === "none" }, f.expr.filters.visible = function (a) { return !f.expr.filters.hidden(a) }); var bD = /%20/g, bE = /\[\]$/, bF = /\r?\n/g, bG = /#.*$/, bH = /^(.*?):[ \t]*([^\r\n]*)\r?$/mg, bI = /^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i, bJ = /^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/, bK = /^(?:GET|HEAD)$/, bL = /^\/\//, bM = /\?/, bN = /)<[^<]*)*<\/script>/gi, bO = /^(?:select|textarea)/i, bP = /\s+/, bQ = /([?&])_=[^&]*/, bR = /^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/, bS = f.fn.load, bT = {}, bU = {}, bV, bW, bX = ["*/"] + ["*"]; try { bV = e.href } catch (bY) { bV = c.createElement("a"), bV.href = "", bV = bV.href } bW = bR.exec(bV.toLowerCase()) || [], f.fn.extend({ load: function (a, c, d) { if (typeof a != "string" && bS) return bS.apply(this, arguments); if (!this.length) return this; var e = a.indexOf(" "); if (e >= 0) { var g = a.slice(e, a.length); a = a.slice(0, e) } var h = "GET"; c && (f.isFunction(c) ? (d = c, c = b) : typeof c == "object" && (c = f.param(c, f.ajaxSettings.traditional), h = "POST")); var i = this; f.ajax({ url: a, type: h, dataType: "html", data: c, complete: function (a, b, c) { c = a.responseText, a.isResolved() && (a.done(function (a) { c = a }), i.html(g ? f("
").append(c.replace(bN, "")).find(g) : c)), d && i.each(d, [c, b, a]) } }); return this }, serialize: function () { return f.param(this.serializeArray()) }, serializeArray: function () { return this.map(function () { return this.elements ? f.makeArray(this.elements) : this }).filter(function () { return this.name && !this.disabled && (this.checked || bO.test(this.nodeName) || bI.test(this.type)) }).map(function (a, b) { var c = f(this).val(); return c == null ? null : f.isArray(c) ? f.map(c, function (a, c) { return { name: b.name, value: a.replace(bF, "\r\n")} }) : { name: b.name, value: c.replace(bF, "\r\n")} }).get() } }), f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), function (a, b) { f.fn[b] = function (a) { return this.on(b, a) } }), f.each(["get", "post"], function (a, c) { f[c] = function (a, d, e, g) { f.isFunction(d) && (g = g || e, e = d, d = b); return f.ajax({ type: c, url: a, data: d, success: e, dataType: g }) } }), f.extend({ getScript: function (a, c) { return f.get(a, b, c, "script") }, getJSON: function (a, b, c) { return f.get(a, b, c, "json") }, ajaxSetup: function (a, b) { b ? b_(a, f.ajaxSettings) : (b = a, a = f.ajaxSettings), b_(a, b); return a }, ajaxSettings: { url: bV, isLocal: bJ.test(bW[1]), global: !0, type: "GET", contentType: "application/x-www-form-urlencoded", processData: !0, async: !0, accepts: { xml: "application/xml, text/xml", html: "text/html", text: "text/plain", json: "application/json, text/javascript", "*": bX }, contents: { xml: /xml/, html: /html/, json: /json/ }, responseFields: { xml: "responseXML", text: "responseText" }, converters: { "* text": a.String, "text html": !0, "text json": f.parseJSON, "text xml": f.parseXML }, flatOptions: { context: !0, url: !0} }, ajaxPrefilter: bZ(bT), ajaxTransport: bZ(bU), ajax: function (a, c) { function w(a, c, l, m) { if (s !== 2) { s = 2, q && clearTimeout(q), p = b, n = m || "", v.readyState = a > 0 ? 4 : 0; var o, r, u, w = c, x = l ? cb(d, v, l) : b, y, z; if (a >= 200 && a < 300 || a === 304) { if (d.ifModified) { if (y = v.getResponseHeader("Last-Modified")) f.lastModified[k] = y; if (z = v.getResponseHeader("Etag")) f.etag[k] = z } if (a === 304) w = "notmodified", o = !0; else try { r = cc(d, x), w = "success", o = !0 } catch (A) { w = "parsererror", u = A } } else { u = w; if (!w || a) w = "error", a < 0 && (a = 0) } v.status = a, v.statusText = "" + (c || w), o ? h.resolveWith(e, [r, w, v]) : h.rejectWith(e, [v, w, u]), v.statusCode(j), j = b, t && g.trigger("ajax" + (o ? "Success" : "Error"), [v, d, o ? r : u]), i.fireWith(e, [v, w]), t && (g.trigger("ajaxComplete", [v, d]), --f.active || f.event.trigger("ajaxStop")) } } typeof a == "object" && (c = a, a = b), c = c || {}; var d = f.ajaxSetup({}, c), e = d.context || d, g = e !== d && (e.nodeType || e instanceof f) ? f(e) : f.event, h = f.Deferred(), i = f.Callbacks("once memory"), j = d.statusCode || {}, k, l = {}, m = {}, n, o, p, q, r, s = 0, t, u, v = { readyState: 0, setRequestHeader: function (a, b) { if (!s) { var c = a.toLowerCase(); a = m[c] = m[c] || a, l[a] = b } return this }, getAllResponseHeaders: function () { return s === 2 ? n : null }, getResponseHeader: function (a) { var c; if (s === 2) { if (!o) { o = {}; while (c = bH.exec(n)) o[c[1].toLowerCase()] = c[2] } c = o[a.toLowerCase()] } return c === b ? 
null : c }, overrideMimeType: function (a) { s || (d.mimeType = a); return this }, abort: function (a) { a = a || "abort", p && p.abort(a), w(0, a); return this } }; h.promise(v), v.success = v.done, v.error = v.fail, v.complete = i.add, v.statusCode = function (a) { if (a) { var b; if (s < 2) for (b in a) j[b] = [j[b], a[b]]; else b = a[v.status], v.then(b, b) } return this }, d.url = ((a || d.url) + "").replace(bG, "").replace(bL, bW[1] + "//"), d.dataTypes = f.trim(d.dataType || "*").toLowerCase().split(bP), d.crossDomain == null && (r = bR.exec(d.url.toLowerCase()), d.crossDomain = !(!r || r[1] == bW[1] && r[2] == bW[2] && (r[3] || (r[1] === "http:" ? 80 : 443)) == (bW[3] || (bW[1] === "http:" ? 80 : 443)))), d.data && d.processData && typeof d.data != "string" && (d.data = f.param(d.data, d.traditional)), b$(bT, d, c, v); if (s === 2) return !1; t = d.global, d.type = d.type.toUpperCase(), d.hasContent = !bK.test(d.type), t && f.active++ === 0 && f.event.trigger("ajaxStart"); if (!d.hasContent) { d.data && (d.url += (bM.test(d.url) ? "&" : "?") + d.data, delete d.data), k = d.url; if (d.cache === !1) { var x = f.now(), y = d.url.replace(bQ, "$1_=" + x); d.url = y + (y === d.url ? (bM.test(d.url) ? "&" : "?") + "_=" + x : "") } } (d.data && d.hasContent && d.contentType !== !1 || c.contentType) && v.setRequestHeader("Content-Type", d.contentType), d.ifModified && (k = k || d.url, f.lastModified[k] && v.setRequestHeader("If-Modified-Since", f.lastModified[k]), f.etag[k] && v.setRequestHeader("If-None-Match", f.etag[k])), v.setRequestHeader("Accept", d.dataTypes[0] && d.accepts[d.dataTypes[0]] ? d.accepts[d.dataTypes[0]] + (d.dataTypes[0] !== "*" ? ", " + bX + "; q=0.01" : "") : d.accepts["*"]); for (u in d.headers) v.setRequestHeader(u, d.headers[u]); if (d.beforeSend && (d.beforeSend.call(e, v, d) === !1 || s === 2)) { v.abort(); return !1 } for (u in { success: 1, error: 1, complete: 1 }) v[u](d[u]); p = b$(bU, d, c, v); if (!p) w(-1, "No Transport"); else { v.readyState = 1, t && g.trigger("ajaxSend", [v, d]), d.async && d.timeout > 0 && (q = setTimeout(function () { v.abort("timeout") }, d.timeout)); try { s = 1, p.send(l, w) } catch (z) { if (s < 2) w(-1, z); else throw z } } return v }, param: function (a, c) { var d = [], e = function (a, b) { b = f.isFunction(b) ? b() : b, d[d.length] = encodeURIComponent(a) + "=" + encodeURIComponent(b) }; c === b && (c = f.ajaxSettings.traditional); if (f.isArray(a) || a.jquery && !f.isPlainObject(a)) f.each(a, function () { e(this.name, this.value) }); else for (var g in a) ca(g, a[g], c, e); return d.join("&").replace(bD, "+") } }), f.extend({ active: 0, lastModified: {}, etag: {} }); var cd = f.now(), ce = /(\=)\?(&|$)|\?\?/i; f.ajaxSetup({ jsonp: "callback", jsonpCallback: function () { return f.expando + "_" + cd++ } }), f.ajaxPrefilter("json jsonp", function (b, c, d) { var e = b.contentType === "application/x-www-form-urlencoded" && typeof b.data == "string"; if (b.dataTypes[0] === "jsonp" || b.jsonp !== !1 && (ce.test(b.url) || e && ce.test(b.data))) { var g, h = b.jsonpCallback = f.isFunction(b.jsonpCallback) ? b.jsonpCallback() : b.jsonpCallback, i = a[h], j = b.url, k = b.data, l = "$1" + h + "$2"; b.jsonp !== !1 && (j = j.replace(ce, l), b.url === j && (e && (k = k.replace(ce, l)), b.data === k && (j += (/\?/.test(j) ? 
"&" : "?") + b.jsonp + "=" + h))), b.url = j, b.data = k, a[h] = function (a) { g = [a] }, d.always(function () { a[h] = i, g && f.isFunction(i) && a[h](g[0]) }), b.converters["script json"] = function () { g || f.error(h + " was not called"); return g[0] }, b.dataTypes[0] = "json"; return "script" } }), f.ajaxSetup({ accepts: { script: "text/javascript, application/javascript, application/ecmascript, application/x-ecmascript" }, contents: { script: /javascript|ecmascript/ }, converters: { "text script": function (a) { f.globalEval(a); return a } } }), f.ajaxPrefilter("script", function (a) { a.cache === b && (a.cache = !1), a.crossDomain && (a.type = "GET", a.global = !1) }), f.ajaxTransport("script", function (a) { if (a.crossDomain) { var d, e = c.head || c.getElementsByTagName("head")[0] || c.documentElement; return { send: function (f, g) { d = c.createElement("script"), d.async = "async", a.scriptCharset && (d.charset = a.scriptCharset), d.src = a.url, d.onload = d.onreadystatechange = function (a, c) { if (c || !d.readyState || /loaded|complete/.test(d.readyState)) d.onload = d.onreadystatechange = null, e && d.parentNode && e.removeChild(d), d = b, c || g(200, "success") }, e.insertBefore(d, e.firstChild) }, abort: function () { d && d.onload(0, 1) } } } }); var cf = a.ActiveXObject ? function () { for (var a in ch) ch[a](0, 1) } : !1, cg = 0, ch; f.ajaxSettings.xhr = a.ActiveXObject ? function () { return !this.isLocal && ci() || cj() } : ci, function (a) { f.extend(f.support, { ajax: !!a, cors: !!a && "withCredentials" in a }) } (f.ajaxSettings.xhr()), f.support.ajax && f.ajaxTransport(function (c) { if (!c.crossDomain || f.support.cors) { var d; return { send: function (e, g) { var h = c.xhr(), i, j; c.username ? h.open(c.type, c.url, c.async, c.username, c.password) : h.open(c.type, c.url, c.async); if (c.xhrFields) for (j in c.xhrFields) h[j] = c.xhrFields[j]; c.mimeType && h.overrideMimeType && h.overrideMimeType(c.mimeType), !c.crossDomain && !e["X-Requested-With"] && (e["X-Requested-With"] = "XMLHttpRequest"); try { for (j in e) h.setRequestHeader(j, e[j]) } catch (k) { } h.send(c.hasContent && c.data || null), d = function (a, e) { var j, k, l, m, n; try { if (d && (e || h.readyState === 4)) { d = b, i && (h.onreadystatechange = f.noop, cf && delete ch[i]); if (e) h.readyState !== 4 && h.abort(); else { j = h.status, l = h.getAllResponseHeaders(), m = {}, n = h.responseXML, n && n.documentElement && (m.xml = n), m.text = h.responseText; try { k = h.statusText } catch (o) { k = "" } !j && c.isLocal && !c.crossDomain ? j = m.text ? 200 : 404 : j === 1223 && (j = 204) } } } catch (p) { e || g(-1, p) } m && g(j, k, m, l) }, !c.async || h.readyState === 4 ? 
d() : (i = ++cg, cf && (ch || (ch = {}, f(a).unload(cf)), ch[i] = d), h.onreadystatechange = d) }, abort: function () { d && d(0, 1) } } } }); var ck = {}, cl, cm, cn = /^(?:toggle|show|hide)$/, co = /^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i, cp, cq = [["height", "marginTop", "marginBottom", "paddingTop", "paddingBottom"], ["width", "marginLeft", "marginRight", "paddingLeft", "paddingRight"], ["opacity"]], cr; f.fn.extend({ show: function (a, b, c) { var d, e; if (a || a === 0) return this.animate(cu("show", 3), a, b, c); for (var g = 0, h = this.length; g < h; g++) d = this[g], d.style && (e = d.style.display, !f._data(d, "olddisplay") && e === "none" && (e = d.style.display = ""), e === "" && f.css(d, "display") === "none" && f._data(d, "olddisplay", cv(d.nodeName))); for (g = 0; g < h; g++) { d = this[g]; if (d.style) { e = d.style.display; if (e === "" || e === "none") d.style.display = f._data(d, "olddisplay") || "" } } return this }, hide: function (a, b, c) { if (a || a === 0) return this.animate(cu("hide", 3), a, b, c); var d, e, g = 0, h = this.length; for (; g < h; g++) d = this[g], d.style && (e = f.css(d, "display"), e !== "none" && !f._data(d, "olddisplay") && f._data(d, "olddisplay", e)); for (g = 0; g < h; g++) this[g].style && (this[g].style.display = "none"); return this }, _toggle: f.fn.toggle, toggle: function (a, b, c) { var d = typeof a == "boolean"; f.isFunction(a) && f.isFunction(b) ? this._toggle.apply(this, arguments) : a == null || d ? this.each(function () { var b = d ? a : f(this).is(":hidden"); f(this)[b ? "show" : "hide"]() }) : this.animate(cu("toggle", 3), a, b, c); return this }, fadeTo: function (a, b, c, d) { return this.filter(":hidden").css("opacity", 0).show().end().animate({ opacity: b }, a, c, d) }, animate: function (a, b, c, d) { function g() { e.queue === !1 && f._mark(this); var b = f.extend({}, e), c = this.nodeType === 1, d = c && f(this).is(":hidden"), g, h, i, j, k, l, m, n, o; b.animatedProperties = {}; for (i in a) { g = f.camelCase(i), i !== g && (a[g] = a[i], delete a[i]), h = a[g], f.isArray(h) ? (b.animatedProperties[g] = h[1], h = a[g] = h[0]) : b.animatedProperties[g] = b.specialEasing && b.specialEasing[g] || b.easing || "swing"; if (h === "hide" && d || h === "show" && !d) return b.complete.call(this); c && (g === "height" || g === "width") && (b.overflow = [this.style.overflow, this.style.overflowX, this.style.overflowY], f.css(this, "display") === "inline" && f.css(this, "float") === "none" && (!f.support.inlineBlockNeedsLayout || cv(this.nodeName) === "inline" ? this.style.display = "inline-block" : this.style.zoom = 1)) } b.overflow != null && (this.style.overflow = "hidden"); for (i in a) j = new f.fx(this, b, i), h = a[i], cn.test(h) ? (o = f._data(this, "toggle" + i) || (h === "toggle" ? d ? "show" : "hide" : 0), o ? (f._data(this, "toggle" + i, o === "show" ? "hide" : "show"), j[o]()) : j[h]()) : (k = co.exec(h), l = j.cur(), k ? (m = parseFloat(k[2]), n = k[3] || (f.cssNumber[i] ? "" : "px"), n !== "px" && (f.style(this, i, (m || 1) + n), l = (m || 1) / j.cur() * l, f.style(this, i, l + n)), k[1] && (m = (k[1] === "-=" ? -1 : 1) * m + l), j.custom(l, m, n)) : j.custom(l, h, "")); return !0 } var e = f.speed(b, c, d); if (f.isEmptyObject(a)) return this.each(e.complete, [!1]); a = f.extend({}, a); return e.queue === !1 ? 
this.each(g) : this.queue(e.queue, g) }, stop: function (a, c, d) { typeof a != "string" && (d = c, c = a, a = b), c && a !== !1 && this.queue(a || "fx", []); return this.each(function () { function h(a, b, c) { var e = b[c]; f.removeData(a, c, !0), e.stop(d) } var b, c = !1, e = f.timers, g = f._data(this); d || f._unmark(!0, this); if (a == null) for (b in g) g[b] && g[b].stop && b.indexOf(".run") === b.length - 4 && h(this, g, b); else g[b = a + ".run"] && g[b].stop && h(this, g, b); for (b = e.length; b--; ) e[b].elem === this && (a == null || e[b].queue === a) && (d ? e[b](!0) : e[b].saveState(), c = !0, e.splice(b, 1)); (!d || !c) && f.dequeue(this, a) }) } }), f.each({ slideDown: cu("show", 1), slideUp: cu("hide", 1), slideToggle: cu("toggle", 1), fadeIn: { opacity: "show" }, fadeOut: { opacity: "hide" }, fadeToggle: { opacity: "toggle"} }, function (a, b) { f.fn[a] = function (a, c, d) { return this.animate(b, a, c, d) } }), f.extend({ speed: function (a, b, c) { var d = a && typeof a == "object" ? f.extend({}, a) : { complete: c || !c && b || f.isFunction(a) && a, duration: a, easing: c && b || b && !f.isFunction(b) && b }; d.duration = f.fx.off ? 0 : typeof d.duration == "number" ? d.duration : d.duration in f.fx.speeds ? f.fx.speeds[d.duration] : f.fx.speeds._default; if (d.queue == null || d.queue === !0) d.queue = "fx"; d.old = d.complete, d.complete = function (a) { f.isFunction(d.old) && d.old.call(this), d.queue ? f.dequeue(this, d.queue) : a !== !1 && f._unmark(this) }; return d }, easing: { linear: function (a, b, c, d) { return c + d * a }, swing: function (a, b, c, d) { return (-Math.cos(a * Math.PI) / 2 + .5) * d + c } }, timers: [], fx: function (a, b, c) { this.options = b, this.elem = a, this.prop = c, b.orig = b.orig || {} } }), f.fx.prototype = { update: function () { this.options.step && this.options.step.call(this.elem, this.now, this), (f.fx.step[this.prop] || f.fx.step._default)(this) }, cur: function () { if (this.elem[this.prop] != null && (!this.elem.style || this.elem.style[this.prop] == null)) return this.elem[this.prop]; var a, b = f.css(this.elem, this.prop); return isNaN(a = parseFloat(b)) ? !b || b === "auto" ? 0 : b : a }, custom: function (a, c, d) { function h(a) { return e.step(a) } var e = this, g = f.fx; this.startTime = cr || cs(), this.end = c, this.now = this.start = a, this.pos = this.state = 0, this.unit = d || this.unit || (f.cssNumber[this.prop] ? "" : "px"), h.queue = this.options.queue, h.elem = this.elem, h.saveState = function () { e.options.hide && f._data(e.elem, "fxshow" + e.prop) === b && f._data(e.elem, "fxshow" + e.prop, e.start) }, h() && f.timers.push(h) && !cp && (cp = setInterval(g.tick, g.interval)) }, show: function () { var a = f._data(this.elem, "fxshow" + this.prop); this.options.orig[this.prop] = a || f.style(this.elem, this.prop), this.options.show = !0, a !== b ? this.custom(this.cur(), a) : this.custom(this.prop === "width" || this.prop === "height" ? 
1 : 0, this.cur()), f(this.elem).show() }, hide: function () { this.options.orig[this.prop] = f._data(this.elem, "fxshow" + this.prop) || f.style(this.elem, this.prop), this.options.hide = !0, this.custom(this.cur(), 0) }, step: function (a) { var b, c, d, e = cr || cs(), g = !0, h = this.elem, i = this.options; if (a || e >= i.duration + this.startTime) { this.now = this.end, this.pos = this.state = 1, this.update(), i.animatedProperties[this.prop] = !0; for (b in i.animatedProperties) i.animatedProperties[b] !== !0 && (g = !1); if (g) { i.overflow != null && !f.support.shrinkWrapBlocks && f.each(["", "X", "Y"], function (a, b) { h.style["overflow" + b] = i.overflow[a] }), i.hide && f(h).hide(); if (i.hide || i.show) for (b in i.animatedProperties) f.style(h, b, i.orig[b]), f.removeData(h, "fxshow" + b, !0), f.removeData(h, "toggle" + b, !0); d = i.complete, d && (i.complete = !1, d.call(h)) } return !1 } i.duration == Infinity ? this.now = e : (c = e - this.startTime, this.state = c / i.duration, this.pos = f.easing[i.animatedProperties[this.prop]](this.state, c, 0, 1, i.duration), this.now = this.start + (this.end - this.start) * this.pos), this.update(); return !0 } }, f.extend(f.fx, { tick: function () { var a, b = f.timers, c = 0; for (; c < b.length; c++) a = b[c], !a() && b[c] === a && b.splice(c--, 1); b.length || f.fx.stop() }, interval: 13, stop: function () { clearInterval(cp), cp = null }, speeds: { slow: 600, fast: 200, _default: 400 }, step: { opacity: function (a) { f.style(a.elem, "opacity", a.now) }, _default: function (a) { a.elem.style && a.elem.style[a.prop] != null ? a.elem.style[a.prop] = a.now + a.unit : a.elem[a.prop] = a.now } } }), f.each(["width", "height"], function (a, b) { f.fx.step[b] = function (a) { f.style(a.elem, b, Math.max(0, a.now) + a.unit) } }), f.expr && f.expr.filters && (f.expr.filters.animated = function (a) { return f.grep(f.timers, function (b) { return a === b.elem }).length }); var cw = /^t(?:able|d|h)$/i, cx = /^(?:body|html)$/i; "getBoundingClientRect" in c.documentElement ? f.fn.offset = function (a) { var b = this[0], c; if (a) return this.each(function (b) { f.offset.setOffset(this, a, b) }); if (!b || !b.ownerDocument) return null; if (b === b.ownerDocument.body) return f.offset.bodyOffset(b); try { c = b.getBoundingClientRect() } catch (d) { } var e = b.ownerDocument, g = e.documentElement; if (!c || !f.contains(g, b)) return c ? { top: c.top, left: c.left} : { top: 0, left: 0 }; var h = e.body, i = cy(e), j = g.clientTop || h.clientTop || 0, k = g.clientLeft || h.clientLeft || 0, l = i.pageYOffset || f.support.boxModel && g.scrollTop || h.scrollTop, m = i.pageXOffset || f.support.boxModel && g.scrollLeft || h.scrollLeft, n = c.top + l - j, o = c.left + m - k; return { top: n, left: o} } : f.fn.offset = function (a) { var b = this[0]; if (a) return this.each(function (b) { f.offset.setOffset(this, a, b) }); if (!b || !b.ownerDocument) return null; if (b === b.ownerDocument.body) return f.offset.bodyOffset(b); var c, d = b.offsetParent, e = b, g = b.ownerDocument, h = g.documentElement, i = g.body, j = g.defaultView, k = j ? j.getComputedStyle(b, null) : b.currentStyle, l = b.offsetTop, m = b.offsetLeft; while ((b = b.parentNode) && b !== i && b !== h) { if (f.support.fixedPosition && k.position === "fixed") break; c = j ? 
j.getComputedStyle(b, null) : b.currentStyle, l -= b.scrollTop, m -= b.scrollLeft, b === d && (l += b.offsetTop, m += b.offsetLeft, f.support.doesNotAddBorder && (!f.support.doesAddBorderForTableAndCells || !cw.test(b.nodeName)) && (l += parseFloat(c.borderTopWidth) || 0, m += parseFloat(c.borderLeftWidth) || 0), e = d, d = b.offsetParent), f.support.subtractsBorderForOverflowNotVisible && c.overflow !== "visible" && (l += parseFloat(c.borderTopWidth) || 0, m += parseFloat(c.borderLeftWidth) || 0), k = c } if (k.position === "relative" || k.position === "static") l += i.offsetTop, m += i.offsetLeft; f.support.fixedPosition && k.position === "fixed" && (l += Math.max(h.scrollTop, i.scrollTop), m += Math.max(h.scrollLeft, i.scrollLeft)); return { top: l, left: m} }, f.offset = { bodyOffset: function (a) { var b = a.offsetTop, c = a.offsetLeft; f.support.doesNotIncludeMarginInBodyOffset && (b += parseFloat(f.css(a, "marginTop")) || 0, c += parseFloat(f.css(a, "marginLeft")) || 0); return { top: b, left: c} }, setOffset: function (a, b, c) { var d = f.css(a, "position"); d === "static" && (a.style.position = "relative"); var e = f(a), g = e.offset(), h = f.css(a, "top"), i = f.css(a, "left"), j = (d === "absolute" || d === "fixed") && f.inArray("auto", [h, i]) > -1, k = {}, l = {}, m, n; j ? (l = e.position(), m = l.top, n = l.left) : (m = parseFloat(h) || 0, n = parseFloat(i) || 0), f.isFunction(b) && (b = b.call(a, c, g)), b.top != null && (k.top = b.top - g.top + m), b.left != null && (k.left = b.left - g.left + n), "using" in b ? b.using.call(a, k) : e.css(k) } }, f.fn.extend({ position: function () { if (!this[0]) return null; var a = this[0], b = this.offsetParent(), c = this.offset(), d = cx.test(b[0].nodeName) ? { top: 0, left: 0} : b.offset(); c.top -= parseFloat(f.css(a, "marginTop")) || 0, c.left -= parseFloat(f.css(a, "marginLeft")) || 0, d.top += parseFloat(f.css(b[0], "borderTopWidth")) || 0, d.left += parseFloat(f.css(b[0], "borderLeftWidth")) || 0; return { top: c.top - d.top, left: c.left - d.left} }, offsetParent: function () { return this.map(function () { var a = this.offsetParent || c.body; while (a && !cx.test(a.nodeName) && f.css(a, "position") === "static") a = a.offsetParent; return a }) } }), f.each(["Left", "Top"], function (a, c) { var d = "scroll" + c; f.fn[d] = function (c) { var e, g; if (c === b) { e = this[0]; if (!e) return null; g = cy(e); return g ? "pageXOffset" in g ? g[a ? "pageYOffset" : "pageXOffset"] : f.support.boxModel && g.document.documentElement[d] || g.document.body[d] : e[d] } return this.each(function () { g = cy(this), g ? g.scrollTo(a ? f(g).scrollLeft() : c, a ? c : f(g).scrollTop()) : this[d] = c }) } }), f.each(["Height", "Width"], function (a, c) { var d = c.toLowerCase(); f.fn["inner" + c] = function () { var a = this[0]; return a ? a.style ? parseFloat(f.css(a, d, "padding")) : this[d]() : null }, f.fn["outer" + c] = function (a) { var b = this[0]; return b ? b.style ? parseFloat(f.css(b, d, a ? "margin" : "border")) : this[d]() : null }, f.fn[d] = function (a) { var e = this[0]; if (!e) return a == null ? 
null : this; if (f.isFunction(a)) return this.each(function (b) { var c = f(this); c[d](a.call(this, b, c[d]())) }); if (f.isWindow(e)) { var g = e.document.documentElement["client" + c], h = e.document.body; return e.document.compatMode === "CSS1Compat" && g || h && h["client" + c] || g } if (e.nodeType === 9) return Math.max(e.documentElement["client" + c], e.body["scroll" + c], e.documentElement["scroll" + c], e.body["offset" + c], e.documentElement["offset" + c]); if (a === b) { var i = f.css(e, d), j = parseFloat(i); return f.isNumeric(j) ? j : i } return this.css(d, typeof a == "string" ? a : a + "px") } }), a.jQuery = a.$ = f, typeof define == "function" && define.amd && define.amd.jQuery && define("jquery", [], function () { return f }) +})(window); \ No newline at end of file diff --git a/public_html/visual_demo/article.py b/public_html/visual_demo/article.py new file mode 100755 index 0000000..19da9ea --- /dev/null +++ b/public_html/visual_demo/article.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python + +import cgi +import cgitb +cgitb.enable() + +import os, sys +sys.path.extend(('../../','../../dispatch','../../langdet')) +from db2zmq_cleartext import DB_get_full_article +import pprint + +import psycopg2, psycopg2.extras +psycopg2.extensions.register_type(psycopg2.extensions.UNICODE) + +def htmlStr(x): + if type(x)==unicode: + x = x.encode('utf8','replace') + else: + x = str(x) + + assert type(x)==str + if x.startswith('http://') and len(x)<300: + return '%s' % (x.replace('&','&').replace('<','<').replace('"','"'), x) + else: + return x.replace('&','&').replace('<','<').replace('\n','
<br>')
+
+
+def resultTableToHtml(rows, columns=None, ignore_columns=[]):
+    """
+    HTML display of DB results. If `columns` is omitted, it's autodetected.
+    `rows` should be a list of dicts.
+    """
+    if not rows:
+        return '(No rows)'
+
+    columns = columns or sorted(rows[0].keys())
+    columns = [c for c in columns if c not in ignore_columns]
+
+    ret = '<table><tr>' + ''.join('<th>%s</th>' % c for c in columns) + '</tr>'
+    for row in rows:
+        ret += '<tr>' + ''.join('<td>%s</td>' % htmlStr(row[c]) for c in columns) + '</tr>'
+    ret += '</table>\n'
+    return ret
+
+
+def articleDetail(articleHandle):
+    # connect to DB
+    conn = psycopg2.connect(database='news', host='maximus', user='mitjat', password='XXX_GITHUB_XXX')
+    cur = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)
+    cur.execute("SET bytea_output TO 'escape'")
+    res = ''
+
+    # identify the article
+    if articleHandle.isdigit():
+        articleId = int(articleHandle)
+    else:
+        cur.execute("SELECT fa_id FROM feed_article_urls WHERE url_hash(%s)=url_hash(url) and %s=url LIMIT 1", (articleHandle,articleHandle))
+        if cur.rowcount:
+            articleId = cur.fetchone()[0]
+        else:
+            return 'URL not in the DB'
+    res += '<hr><h1>Article %s</h1>' % articleId
+
+    # fetch what db2zmq sees
+    zmq_data = DB_get_full_article(cur, articleId)
+    res += '\n\n<hr><h2>zmq</h2>' + '<pre>'+pprint.pformat(zmq_data)+'</pre>'
+
+    # fetch raw results
+    cur.execute("SELECT * FROM feed_article_meta WHERE id=%s", (articleId,))
+    res += '\n\n<hr><h2>feed_article_meta</h2>' + resultTableToHtml(cur.fetchall())
+    cur.execute("SELECT * FROM feed_article WHERE id=%s", (articleId,))
+    rows = cur.fetchall()
+    res += '\n\n<hr><h2>feed_article</h2>' + resultTableToHtml(rows)
+    feedId = rows[0]['feedid']
+    siteId = rows[0]['feedsiteid']
+    cur.execute("SELECT * FROM feed WHERE id=%s", (feedId,))
+    res += '\n\n<hr><h2>feed</h2>' + resultTableToHtml(cur.fetchall())
+    cur.execute("SELECT * FROM site WHERE id=%s", (siteId,))
+    res += '\n\n<hr><h2>site</h2>' + resultTableToHtml(cur.fetchall())
+    cur.execute("SELECT * FROM processed_article WHERE feed_articleid=%s", (articleId,))
+    rows = cur.fetchall()
+    res += '\n\n<hr><h2>processed_article</h2>' + resultTableToHtml(rows, ignore_columns=['content'])
+    for (i,row) in enumerate(rows):
+        res += 'content (row %d)<br><pre>%s</pre>' % (i, row['content'].encode('utf8','replace').replace('&','&amp;').replace('<','&lt;'))
+    cur.execute("""SELECT story_id, ARRAY_AGG('<br>&nbsp;&nbsp;• '||m.id||' - '||COALESCE(m.title,'(no title)')) AS story_articles FROM feed_article_googles g JOIN feed_article_meta m ON (g.feed_articleid=m.id) WHERE story_id=(SELECT story_id FROM feed_article_googles WHERE feed_articleid=%s) GROUP BY story_id;""", (articleId,))
+    res += '\n\n<hr><h2>Google clusters/stories</h2>'
+    for row in cur:
+        res += 'Story '+str(row['story_id'])+'<br>'+'\n'.join(row['story_articles'])+'<br>'
+    cur.execute("SELECT * FROM article WHERE feed_articleid=%s", (articleId,))
+    rows = cur.fetchall()
+    res += '\n\n<hr><h2>article</h2>' + resultTableToHtml(rows, ignore_columns=['content'])
+    for (i,row) in enumerate(rows):
+        res += 'content (row %d)<br><pre>%r</pre>' % (i, str(row['content']).replace('&','&amp;').replace('<','&lt;'))
+
+    return res
+
+header = """
+
+
+
+
+
+
+"""
+
+footer = """
+
+
+"""
+
+if __name__=='__main__':
+    # parse request
+    form = cgi.FieldStorage()
+    articleHandle = form.getvalue('id')
+
+    # print response
+    print 'Content-type: text/html\n\n'
+
+    print 'New query: ID or URL'
+    print '<hr>'
+
+    if articleHandle:
+        print header + articleDetail(articleHandle) + footer
+    else:
+        print 'Gimme an id'
+
\ No newline at end of file
diff --git a/public_html/visual_demo/index.html b/public_html/visual_demo/index.html
new file mode 100644
index 0000000..396ed60
--- /dev/null
+++ b/public_html/visual_demo/index.html
@@ -0,0 +1,217 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
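(Aside, not part of the patch: a minimal sketch of how the resultTableToHtml helper defined in article.py above is meant to be called. The sample rows are invented for illustration; resultTableToHtml and the htmlStr escaping/linking helper come from the file above.)

# Illustrative only -- assumes htmlStr() and resultTableToHtml() from article.py are in scope.
rows = [
    {'id': 1, 'title': 'First article',  'url': 'http://example.com/1'},
    {'id': 2, 'title': 'Second article', 'url': 'http://example.com/2'},
]

# Columns are autodetected from the sorted dict keys; 'url' is suppressed here.
# htmlStr() HTML-escapes each cell and renders short http:// values as links.
print resultTableToHtml(rows, ignore_columns=['url'])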
    +
    +
    +

    Real-time newsfeed demo

    +
    + Since this page was opened: 0 articles received, 0 skipped for legibility. +
    +
    +
    + Updates stopped
    + + Move the mouse away to resume.
    + Click an article for additional info. +
    +
    +
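(Aside, not part of the patch: the two files that follow, lib/eventsource-remy.js and lib/eventsource.js, are vendored EventSource polyfills, presumably added so the demo page can receive pushed articles in browsers without native server-sent-events support. Both parse the standard text/event-stream framing. Below is a rough sketch of one event on such a stream; the 'article' event name and the JSON payload are assumptions for illustration, not taken from this repository.)

# Illustrative only -- shows the SSE wire format (event:, id:, data:, retry: fields)
# that the polyfills below parse; the event name and payload are hypothetical.
def sse_event(data, event=None, event_id=None):
    """Serialize one server-sent event; a blank line terminates the event."""
    lines = []
    if event is not None:
        lines.append('event: %s' % event)
    if event_id is not None:
        lines.append('id: %s' % event_id)
    for chunk in data.split('\n'):
        lines.append('data: %s' % chunk)   # multi-line payloads repeat the data: field
    return '\n'.join(lines) + '\n\n'

print sse_event('{"title": "Example article"}', event='article', event_id='42')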
    + + diff --git a/public_html/visual_demo/lib/eventsource-remy.js b/public_html/visual_demo/lib/eventsource-remy.js new file mode 100644 index 0000000..829a7f8 --- /dev/null +++ b/public_html/visual_demo/lib/eventsource-remy.js @@ -0,0 +1,174 @@ +;(function (global) { + +//if ("EventSource" in global) return; + +var reTrim = /^(\s|\u00A0)+|(\s|\u00A0)+$/g; + +var EventSource = function (url) { + var eventsource = this, + interval = 500, // polling interval + lastEventId = null, + cache = ''; + + if (!url || typeof url != 'string') { + throw new SyntaxError('Not enough arguments'); + } + + this.URL = url; + this.readyState = this.CONNECTING; + this._pollTimer = null; + this._xhr = null; + + function pollAgain() { + eventsource._pollTimer = setTimeout(function () { + poll.call(eventsource); + }, interval); + } + + function poll() { + try { // force hiding of the error message... insane? + if (eventsource.readyState == eventsource.CLOSED) return; + + // NOTE: IE7 and upwards support + var xhr = new XMLHttpRequest(); + xhr.open('GET', eventsource.URL, true); + xhr.setRequestHeader('Accept', 'text/event-stream'); + xhr.setRequestHeader('Cache-Control', 'no-cache'); + // we must make use of this on the server side if we're working with Android - because they don't trigger + // readychange until the server connection is closed + xhr.setRequestHeader('X-Requested-With', 'XMLHttpRequest'); + + if (lastEventId != null) xhr.setRequestHeader('Last-Event-ID', lastEventId); + cache = ''; + + xhr.timeout = 50000; + xhr.onreadystatechange = function () { + if ((this.readyState == 3 || this.readyState == 4) && this.status == 200) { + // on success + if (eventsource.readyState == eventsource.CONNECTING) { + eventsource.readyState = eventsource.OPEN; + eventsource.dispatchEvent('open', { type: 'open' }); + } + + var responseText = ''; + try { + responseText = this.responseText || ''; + } catch (e) {} + + // process this.responseText + var parts = responseText.substr(cache.length).split("\n"), + eventType = 'message', + data = [], + i = 0, + line = ''; + + cache = responseText; + + // TODO handle 'event' (for buffer name), retry + for (; i < parts.length; i++) { + line = parts[i].replace(reTrim, ''); + if (line.indexOf('event') == 0) { + eventType = line.replace(/event:?\s*/, ''); + } else if (line.indexOf('data') == 0) { + data.push(line.replace(/data:?\s*/, '')); + } else if (line.indexOf('id:') == 0) { + lastEventId = line.replace(/id:?\s*/, ''); + } else if (line.indexOf('id') == 0) { // this resets the id + lastEventId = null; + } else if (line == '') { + if (data.length) { + var event = new MessageEvent(data.join('\n'), eventsource.url, lastEventId); + eventsource.dispatchEvent(eventType, event); + data = []; + eventType = 'message'; + } + } + } + + if (this.readyState == 4) pollAgain(); + // don't need to poll again, because we're long-loading + } else if (eventsource.readyState !== eventsource.CLOSED) { + if (this.readyState == 4) { // and some other status + // dispatch error + eventsource.readyState = eventsource.CONNECTING; + eventsource.dispatchEvent('error', { type: 'error' }); + pollAgain(); + } else if (this.readyState == 0) { // likely aborted + pollAgain(); + } else { + } + } + }; + + xhr.send(); + + setTimeout(function () { + if (true || xhr.readyState == 3) xhr.abort(); + }, xhr.timeout); + + eventsource._xhr = xhr; + + } catch (e) { // in an attempt to silence the errors + eventsource.dispatchEvent('error', { type: 'error', data: e.message }); // ??? 
+ } + }; + + poll(); // init now +}; + +EventSource.prototype = { + close: function () { + // closes the connection - disabling the polling + this.readyState = this.CLOSED; + clearInterval(this._pollTimer); + this._xhr.abort(); + }, + CONNECTING: 0, + OPEN: 1, + CLOSED: 2, + dispatchEvent: function (type, event) { + var handlers = this['_' + type + 'Handlers']; + if (handlers) { + for (var i = 0; i < handlers.length; i++) { + handlers[i].call(this, event); + } + } + + if (this['on' + type]) { + this['on' + type].call(this, event); + } + }, + addEventListener: function (type, handler) { + if (!this['_' + type + 'Handlers']) { + this['_' + type + 'Handlers'] = []; + } + + this['_' + type + 'Handlers'].push(handler); + }, + removeEventListener: function () { + // TODO + }, + onerror: null, + onmessage: null, + onopen: null, + readyState: 0, + URL: '' +}; + +var MessageEvent = function (data, origin, lastEventId) { + this.data = data; + this.origin = origin; + this.lastEventId = lastEventId || ''; +}; + +MessageEvent.prototype = { + data: null, + type: 'message', + lastEventId: '', + origin: '' +}; + +if ('module' in global) module.exports = EventSource; +global.EventSource = EventSource; + +})(this); diff --git a/public_html/visual_demo/lib/eventsource.js b/public_html/visual_demo/lib/eventsource.js new file mode 100644 index 0000000..05e4a69 --- /dev/null +++ b/public_html/visual_demo/lib/eventsource.js @@ -0,0 +1,400 @@ +/*jslint indent: 2 */ +/*global setTimeout, clearTimeout */ + +(function (global) { + "use strict"; + + function EventTarget() { + return this; + } + + EventTarget.prototype = { + nextListener: null, + throwError: function (e) { + setTimeout(function () { + throw e; + }, 0); + }, + invokeEvent: function (event) { + var type = String(event.type), + i = this.nextListener, + phase = event.eventPhase, + candidates = { + next: null + }, + j = candidates; + while (i) { + if (i.type === type && !(phase === 1 && !i.capture) && !(phase === 3 && i.capture)) { + j = j.next = { + callback: i.callback, + next: null + }; + } + i = i.nextListener; + } + j = candidates.next; + while (j) { + event.currentTarget = this; + try { + j.callback.call(this, event); + } catch (e) { + this.throwError(e); + } + event.currentTarget = null; + j = j.next; + } + }, + dispatchEvent: function (event) { + event.eventPhase = 2; + this.invokeEvent(event); + }, + addEventListener: function (type, callback, capture) { + type = String(type); + capture = Boolean(capture); + var listener = this, + i = listener.nextListener; + while (i) { + if (i.type === type && i.callback === callback && i.capture === capture) { + return; + } + listener = i; + i = i.nextListener; + } + listener.nextListener = { + nextListener: null, + type: type, + callback: callback, + capture: capture + }; + }, + removeEventListener: function (type, callback, capture) { + type = String(type); + capture = Boolean(capture); + var listener = this, + i = listener.nextListener; + while (i) { + if (i.type === type && i.callback === callback && i.capture === capture) { + listener.nextListener = i.nextListener; + return; + } + listener = i; + i = i.nextListener; + } + } + }; + + // http://blogs.msdn.com/b/ieinternals/archive/2010/04/06/comet-streaming-in-internet-explorer-with-xmlhttprequest-and-xdomainrequest.aspx?PageIndex=1#comments + // XDomainRequest does not have a binary interface. To use with non-text, first base64 to string. 
+ // http://cometdaily.com/2008/page/3/ + + var XHR = global.XMLHttpRequest, + xhr2 = XHR && global.ProgressEvent && ((new XHR()).withCredentials !== undefined), + Transport = xhr2 ? XHR : global.XDomainRequest, + CONNECTING = 0, + OPEN = 1, + CLOSED = 2, + proto; + + function empty() {} + + function EventSource(url, options) { + url = String(url); + + var that = this, + retry = 1000, + retry2 = retry, + heartbeatTimeout = 45000, + xhrTimeout = null, + wasActivity = false, + lastEventId = '', + xhr = new Transport(), + reconnectTimeout = null, + withCredentials = Boolean(xhr2 && options && options.withCredentials), + offset, + charOffset, + opened, + buffer = { + data: '', + lastEventId: '', + name: '' + }, + tail = { + next: null, + event: null, + readyState: null + }, + head = tail, + channel = null; + + options = null; + that.url = url; + + that.readyState = CONNECTING; + that.withCredentials = withCredentials; + + // Queue a task which, if the readyState is set to a value other than CLOSED, + // sets the readyState to ... and fires event + + function onTimeout() { + var event = head.event, + readyState = head.readyState, + type = String(event.type); + head = head.next; + + if (that.readyState !== CLOSED) { // http://www.w3.org/Bugs/Public/show_bug.cgi?id=14331 + if (readyState !== null) { + that.readyState = readyState; + } + + if (readyState === CONNECTING) { + // setTimeout will wait before previous setTimeout(0) have completed + retry2 = Math.min(retry2, 86400000); + reconnectTimeout = setTimeout(openConnection, retry2); + retry2 = retry2 * 2 + 1; + } + + event.target = that; + that.dispatchEvent(event); + + if (/^(message|error|open)$/.test(type) && typeof that['on' + type] === 'function') { + // as IE 8 doesn't support getters/setters, we can't implement 'onmessage' via addEventListener/removeEventListener + that['on' + type](event); + } + } + } + + // MessageChannel support: IE 10, Opera 11.6x?, Chrome ?, Safari ? + if (global.MessageChannel) { + channel = new global.MessageChannel(); + channel.port1.onmessage = onTimeout; + } + + function queue(event, readyState) { + tail.event = event; + tail.readyState = readyState; + tail = tail.next = { + next: null, + event: null, + readyState: null + }; + if (channel) { + channel.port2.postMessage(''); + } else { + setTimeout(onTimeout, 0); + } + } + + function close() { + // http://dev.w3.org/html5/eventsource/ The close() method must close the connection, if any; must abort any instances of the fetch algorithm started for this EventSource object; and must set the readyState attribute to CLOSED. + if (xhr !== null) { + xhr.onload = xhr.onerror = xhr.onprogress = xhr.onreadystatechange = empty; + xhr.abort(); + xhr = null; + } + if (reconnectTimeout !== null) { + clearTimeout(reconnectTimeout); + reconnectTimeout = null; + } + if (xhrTimeout !== null) { + clearTimeout(xhrTimeout); + xhrTimeout = null; + } + that.readyState = CLOSED; + } + + that.close = close; + + EventTarget.call(that); + + function onXHRTimeout() { + xhrTimeout = null; + if (wasActivity) { + wasActivity = false; + xhrTimeout = setTimeout(onXHRTimeout, heartbeatTimeout); + } else { + xhr.onload = xhr.onerror = xhr.onprogress = empty; + xhr.abort(); + onError.call(xhr); + } + } + + function onProgress() { + var responseText = xhr.responseText || '', + contentType, + i, + j, + part, + stream, + field, + value; + + wasActivity = true; + + if (!opened) { + try { + contentType = xhr.getResponseHeader ? 
xhr.getResponseHeader('Content-Type') : xhr.contentType; + } catch (error) { + // invalid state error when xhr.getResponseHeader called after xhr.abort in Chrome 18 + setTimeout(function () { + throw error; + }, 0); + } + if (contentType && (/^text\/event\-stream/i).test(contentType)) { + queue({type: 'open'}, OPEN); + opened = true; + retry2 = retry; + } + } + + if (opened && (/\r|\n/).test(responseText.slice(charOffset))) { + part = responseText.slice(offset); + stream = part.replace(/\r\n?/g, '\n').split('\n'); + + offset += part.length - stream[stream.length - 1].length; + for (i = 0; i < stream.length - 1; i += 1) { + field = stream[i]; + value = ''; + j = field.indexOf(':'); + if (j !== -1) { + value = field.slice(j + (field.charAt(j + 1) === ' ' ? 2 : 1)); + field = field.slice(0, j); + } + + if (!stream[i]) { + // dispatch the event + if (buffer.data) { + lastEventId = buffer.lastEventId; + queue({ + type: buffer.name || 'message', + lastEventId: lastEventId, + data: buffer.data.replace(/\n$/, '') + }, null); + } + // Set the data buffer and the event name buffer to the empty string. + buffer.data = ''; + buffer.name = ''; + } + + if (field === 'event') { + buffer.name = value; + } + + if (field === 'id') { + buffer.lastEventId = value; // see http://www.w3.org/Bugs/Public/show_bug.cgi?id=13761 + } + + if (field === 'retry') { + if (/^\d+$/.test(value)) { + retry = Number(value); + retry2 = retry; + } + } + + if (field === 'heartbeatTimeout') {//! + heartbeatTimeout = Math.min(Math.max(1, Number(value) || 0), 86400000); + if (xhrTimeout !== null) { + clearTimeout(xhrTimeout); + xhrTimeout = setTimeout(onXHRTimeout, heartbeatTimeout); + } + } + + if (field === 'data') { + buffer.data += value + '\n'; + } + } + } + charOffset = responseText.length; + } + + function onError() { + onProgress(); + //if (opened) { + // reestablishes the connection + queue({type: 'error'}, CONNECTING); + //} else { + // fail the connection + // queue({type: 'error'}, CLOSED); + //} + if (xhrTimeout !== null) { + clearTimeout(xhrTimeout); + xhrTimeout = null; + } + } + + function onReadyStateChange() { + if (xhr.readyState === 3) { + onProgress(); + } + } + + function openConnection() { + // XDomainRequest#abort removes onprogress, onerror, onload + + xhr.onload = xhr.onerror = onError; + + // onprogress fires multiple times while readyState === 3 + // onprogress should be setted before calling "open" for Firefox 3.6 + xhr.onprogress = onProgress; + + // Firefox 3.6 + xhr.onreadystatechange = onReadyStateChange; + + reconnectTimeout = null; + wasActivity = false; + xhrTimeout = setTimeout(onXHRTimeout, heartbeatTimeout); + + offset = 0; + charOffset = 0; + opened = false; + buffer.data = ''; + buffer.name = ''; + buffer.lastEventId = lastEventId;//resets to last successful + + // with GET method in FF xhr.onreadystatechange with readyState === 3 doesn't work + POST = no-cache + xhr.open('POST', url, true); + + // withCredentials should be setted after "open" for Safari and Chrome (< 19 ?) + xhr.withCredentials = withCredentials; + + if (xhr.setRequestHeader) { // !XDomainRequest + // http://dvcs.w3.org/hg/cors/raw-file/tip/Overview.html + // Cache-Control is not a simple header + // Request header field Cache-Control is not allowed by Access-Control-Allow-Headers. 
+ //xhr.setRequestHeader('Cache-Control', 'no-cache'); + + // Chrome bug: + // http://code.google.com/p/chromium/issues/detail?id=71694 + // If you force Chrome to have a whitelisted content-type, either explicitly with setRequestHeader(), or implicitly by sending a FormData, then no preflight is done. + xhr.setRequestHeader('Content-type', 'application/x-www-form-urlencoded'); + xhr.setRequestHeader('Accept', 'text/event-stream'); + + // Request header field Last-Event-ID is not allowed by Access-Control-Allow-Headers. + // +setRequestHeader shouldn't be used to avoid preflight requests + //if (lastEventId !== '') { + // xhr.setRequestHeader('Last-Event-ID', lastEventId); + //} + } + xhr.send(lastEventId !== '' ? 'Last-Event-ID=' + encodeURIComponent(lastEventId) : ''); + } + + openConnection(); + + return that; + } + + proto = new EventTarget(); + proto.CONNECTING = CONNECTING; + proto.OPEN = OPEN; + proto.CLOSED = CLOSED; + + EventSource.prototype = proto; + EventSource.CONNECTING = CONNECTING; + EventSource.OPEN = OPEN; + EventSource.CLOSED = CLOSED; + proto = null; + + if (Transport) { + global.EventSource = EventSource; + } + +}(this)); diff --git a/public_html/visual_demo/lib/jquery.min.js b/public_html/visual_demo/lib/jquery.min.js new file mode 100644 index 0000000..198b3ff --- /dev/null +++ b/public_html/visual_demo/lib/jquery.min.js @@ -0,0 +1,4 @@ +/*! jQuery v1.7.1 jquery.com | jquery.org/license */ +(function(a,b){function cy(a){return f.isWindow(a)?a:a.nodeType===9?a.defaultView||a.parentWindow:!1}function cv(a){if(!ck[a]){var b=c.body,d=f("<"+a+">").appendTo(b),e=d.css("display");d.remove();if(e==="none"||e===""){cl||(cl=c.createElement("iframe"),cl.frameBorder=cl.width=cl.height=0),b.appendChild(cl);if(!cm||!cl.createElement)cm=(cl.contentWindow||cl.contentDocument).document,cm.write((c.compatMode==="CSS1Compat"?"":"")+""),cm.close();d=cm.createElement(a),cm.body.appendChild(d),e=f.css(d,"display"),b.removeChild(cl)}ck[a]=e}return ck[a]}function cu(a,b){var c={};f.each(cq.concat.apply([],cq.slice(0,b)),function(){c[this]=a});return c}function ct(){cr=b}function cs(){setTimeout(ct,0);return cr=f.now()}function cj(){try{return new a.ActiveXObject("Microsoft.XMLHTTP")}catch(b){}}function ci(){try{return new a.XMLHttpRequest}catch(b){}}function cc(a,c){a.dataFilter&&(c=a.dataFilter(c,a.dataType));var d=a.dataTypes,e={},g,h,i=d.length,j,k=d[0],l,m,n,o,p;for(g=1;g0){if(c!=="border")for(;g=0===c})}function S(a){return!a||!a.parentNode||a.parentNode.nodeType===11}function K(){return!0}function J(){return!1}function n(a,b,c){var d=b+"defer",e=b+"queue",g=b+"mark",h=f._data(a,d);h&&(c==="queue"||!f._data(a,e))&&(c==="mark"||!f._data(a,g))&&setTimeout(function(){!f._data(a,e)&&!f._data(a,g)&&(f.removeData(a,d,!0),h.fire())},0)}function m(a){for(var b in a){if(b==="data"&&f.isEmptyObject(a[b]))continue;if(b!=="toJSON")return!1}return!0}function l(a,c,d){if(d===b&&a.nodeType===1){var e="data-"+c.replace(k,"-$1").toLowerCase();d=a.getAttribute(e);if(typeof d=="string"){try{d=d==="true"?!0:d==="false"?!1:d==="null"?null:f.isNumeric(d)?parseFloat(d):j.test(d)?f.parseJSON(d):d}catch(g){}f.data(a,c,d)}else d=b}return d}function h(a){var b=g[a]={},c,d;a=a.split(/\s+/);for(c=0,d=a.length;c)[^>]*$|#([\w\-]*)$)/,j=/\S/,k=/^\s+/,l=/\s+$/,m=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,n=/^[\],:{}\s]*$/,o=/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g,p=/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g,q=/(?:^|:|,)(?:\s*\[)+/g,r=/(webkit)[ \/]([\w.]+)/,s=/(opera)(?:.*version)?[ \/]([\w.]+)/,t=/(msie) 
([\w.]+)/,u=/(mozilla)(?:.*? rv:([\w.]+))?/,v=/-([a-z]|[0-9])/ig,w=/^-ms-/,x=function(a,b){return(b+"").toUpperCase()},y=d.userAgent,z,A,B,C=Object.prototype.toString,D=Object.prototype.hasOwnProperty,E=Array.prototype.push,F=Array.prototype.slice,G=String.prototype.trim,H=Array.prototype.indexOf,I={};e.fn=e.prototype={constructor:e,init:function(a,d,f){var g,h,j,k;if(!a)return this;if(a.nodeType){this.context=this[0]=a,this.length=1;return this}if(a==="body"&&!d&&c.body){this.context=c,this[0]=c.body,this.selector=a,this.length=1;return this}if(typeof a=="string"){a.charAt(0)!=="<"||a.charAt(a.length-1)!==">"||a.length<3?g=i.exec(a):g=[null,a,null];if(g&&(g[1]||!d)){if(g[1]){d=d instanceof e?d[0]:d,k=d?d.ownerDocument||d:c,j=m.exec(a),j?e.isPlainObject(d)?(a=[c.createElement(j[1])],e.fn.attr.call(a,d,!0)):a=[k.createElement(j[1])]:(j=e.buildFragment([g[1]],[k]),a=(j.cacheable?e.clone(j.fragment):j.fragment).childNodes);return e.merge(this,a)}h=c.getElementById(g[2]);if(h&&h.parentNode){if(h.id!==g[2])return f.find(a);this.length=1,this[0]=h}this.context=c,this.selector=a;return this}return!d||d.jquery?(d||f).find(a):this.constructor(d).find(a)}if(e.isFunction(a))return f.ready(a);a.selector!==b&&(this.selector=a.selector,this.context=a.context);return e.makeArray(a,this)},selector:"",jquery:"1.7.1",length:0,size:function(){return this.length},toArray:function(){return F.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this[this.length+a]:this[a]},pushStack:function(a,b,c){var d=this.constructor();e.isArray(a)?E.apply(d,a):e.merge(d,a),d.prevObject=this,d.context=this.context,b==="find"?d.selector=this.selector+(this.selector?" ":"")+c:b&&(d.selector=this.selector+"."+b+"("+c+")");return d},each:function(a,b){return e.each(this,a,b)},ready:function(a){e.bindReady(),A.add(a);return this},eq:function(a){a=+a;return a===-1?this.slice(a):this.slice(a,a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(F.apply(this,arguments),"slice",F.call(arguments).join(","))},map:function(a){return this.pushStack(e.map(this,function(b,c){return a.call(b,c,b)}))},end:function(){return this.prevObject||this.constructor(null)},push:E,sort:[].sort,splice:[].splice},e.fn.init.prototype=e.fn,e.extend=e.fn.extend=function(){var a,c,d,f,g,h,i=arguments[0]||{},j=1,k=arguments.length,l=!1;typeof i=="boolean"&&(l=i,i=arguments[1]||{},j=2),typeof i!="object"&&!e.isFunction(i)&&(i={}),k===j&&(i=this,--j);for(;j0)return;A.fireWith(c,[e]),e.fn.trigger&&e(c).trigger("ready").off("ready")}},bindReady:function(){if(!A){A=e.Callbacks("once memory");if(c.readyState==="complete")return setTimeout(e.ready,1);if(c.addEventListener)c.addEventListener("DOMContentLoaded",B,!1),a.addEventListener("load",e.ready,!1);else if(c.attachEvent){c.attachEvent("onreadystatechange",B),a.attachEvent("onload",e.ready);var b=!1;try{b=a.frameElement==null}catch(d){}c.documentElement.doScroll&&b&&J()}}},isFunction:function(a){return e.type(a)==="function"},isArray:Array.isArray||function(a){return e.type(a)==="array"},isWindow:function(a){return a&&typeof a=="object"&&"setInterval"in a},isNumeric:function(a){return!isNaN(parseFloat(a))&&isFinite(a)},type:function(a){return a==null?String(a):I[C.call(a)]||"object"},isPlainObject:function(a){if(!a||e.type(a)!=="object"||a.nodeType||e.isWindow(a))return!1;try{if(a.constructor&&!D.call(a,"constructor")&&!D.call(a.constructor.prototype,"isPrototypeOf"))return!1}catch(c){return!1}var d;for(d in a);return 
d===b||D.call(a,d)},isEmptyObject:function(a){for(var b in a)return!1;return!0},error:function(a){throw new Error(a)},parseJSON:function(b){if(typeof b!="string"||!b)return null;b=e.trim(b);if(a.JSON&&a.JSON.parse)return a.JSON.parse(b);if(n.test(b.replace(o,"@").replace(p,"]").replace(q,"")))return(new Function("return "+b))();e.error("Invalid JSON: "+b)},parseXML:function(c){var d,f;try{a.DOMParser?(f=new DOMParser,d=f.parseFromString(c,"text/xml")):(d=new ActiveXObject("Microsoft.XMLDOM"),d.async="false",d.loadXML(c))}catch(g){d=b}(!d||!d.documentElement||d.getElementsByTagName("parsererror").length)&&e.error("Invalid XML: "+c);return d},noop:function(){},globalEval:function(b){b&&j.test(b)&&(a.execScript||function(b){a.eval.call(a,b)})(b)},camelCase:function(a){return a.replace(w,"ms-").replace(v,x)},nodeName:function(a,b){return a.nodeName&&a.nodeName.toUpperCase()===b.toUpperCase()},each:function(a,c,d){var f,g=0,h=a.length,i=h===b||e.isFunction(a);if(d){if(i){for(f in a)if(c.apply(a[f],d)===!1)break}else for(;g0&&a[0]&&a[j-1]||j===0||e.isArray(a));if(k)for(;i1?i.call(arguments,0):b,j.notifyWith(k,e)}}function l(a){return function(c){b[a]=arguments.length>1?i.call(arguments,0):c,--g||j.resolveWith(j,b)}}var b=i.call(arguments,0),c=0,d=b.length,e=Array(d),g=d,h=d,j=d<=1&&a&&f.isFunction(a.promise)?a:f.Deferred(),k=j.promise();if(d>1){for(;c
    a",d=q.getElementsByTagName("*"),e=q.getElementsByTagName("a")[0];if(!d||!d.length||!e)return{};g=c.createElement("select"),h=g.appendChild(c.createElement("option")),i=q.getElementsByTagName("input")[0],b={leadingWhitespace:q.firstChild.nodeType===3,tbody:!q.getElementsByTagName("tbody").length,htmlSerialize:!!q.getElementsByTagName("link").length,style:/top/.test(e.getAttribute("style")),hrefNormalized:e.getAttribute("href")==="/a",opacity:/^0.55/.test(e.style.opacity),cssFloat:!!e.style.cssFloat,checkOn:i.value==="on",optSelected:h.selected,getSetAttribute:q.className!=="t",enctype:!!c.createElement("form").enctype,html5Clone:c.createElement("nav").cloneNode(!0).outerHTML!=="<:nav>",submitBubbles:!0,changeBubbles:!0,focusinBubbles:!1,deleteExpando:!0,noCloneEvent:!0,inlineBlockNeedsLayout:!1,shrinkWrapBlocks:!1,reliableMarginRight:!0},i.checked=!0,b.noCloneChecked=i.cloneNode(!0).checked,g.disabled=!0,b.optDisabled=!h.disabled;try{delete q.test}catch(s){b.deleteExpando=!1}!q.addEventListener&&q.attachEvent&&q.fireEvent&&(q.attachEvent("onclick",function(){b.noCloneEvent=!1}),q.cloneNode(!0).fireEvent("onclick")),i=c.createElement("input"),i.value="t",i.setAttribute("type","radio"),b.radioValue=i.value==="t",i.setAttribute("checked","checked"),q.appendChild(i),k=c.createDocumentFragment(),k.appendChild(q.lastChild),b.checkClone=k.cloneNode(!0).cloneNode(!0).lastChild.checked,b.appendChecked=i.checked,k.removeChild(i),k.appendChild(q),q.innerHTML="",a.getComputedStyle&&(j=c.createElement("div"),j.style.width="0",j.style.marginRight="0",q.style.width="2px",q.appendChild(j),b.reliableMarginRight=(parseInt((a.getComputedStyle(j,null)||{marginRight:0}).marginRight,10)||0)===0);if(q.attachEvent)for(o in{submit:1,change:1,focusin:1})n="on"+o,p=n in q,p||(q.setAttribute(n,"return;"),p=typeof q[n]=="function"),b[o+"Bubbles"]=p;k.removeChild(q),k=g=h=j=q=i=null,f(function(){var a,d,e,g,h,i,j,k,m,n,o,r=c.getElementsByTagName("body")[0];!r||(j=1,k="position:absolute;top:0;left:0;width:1px;height:1px;margin:0;",m="visibility:hidden;border:0;",n="style='"+k+"border:5px solid #000;padding:0;'",o="
    "+""+"
    ",a=c.createElement("div"),a.style.cssText=m+"width:0;height:0;position:static;top:0;margin-top:"+j+"px",r.insertBefore(a,r.firstChild),q=c.createElement("div"),a.appendChild(q),q.innerHTML="
    t
    ",l=q.getElementsByTagName("td"),p=l[0].offsetHeight===0,l[0].style.display="",l[1].style.display="none",b.reliableHiddenOffsets=p&&l[0].offsetHeight===0,q.innerHTML="",q.style.width=q.style.paddingLeft="1px",f.boxModel=b.boxModel=q.offsetWidth===2,typeof q.style.zoom!="undefined"&&(q.style.display="inline",q.style.zoom=1,b.inlineBlockNeedsLayout=q.offsetWidth===2,q.style.display="",q.innerHTML="
    ",b.shrinkWrapBlocks=q.offsetWidth!==2),q.style.cssText=k+m,q.innerHTML=o,d=q.firstChild,e=d.firstChild,h=d.nextSibling.firstChild.firstChild,i={doesNotAddBorder:e.offsetTop!==5,doesAddBorderForTableAndCells:h.offsetTop===5},e.style.position="fixed",e.style.top="20px",i.fixedPosition=e.offsetTop===20||e.offsetTop===15,e.style.position=e.style.top="",d.style.overflow="hidden",d.style.position="relative",i.subtractsBorderForOverflowNotVisible=e.offsetTop===-5,i.doesNotIncludeMarginInBodyOffset=r.offsetTop!==j,r.removeChild(a),q=a=null,f.extend(b,i))});return b}();var j=/^(?:\{.*\}|\[.*\])$/,k=/([A-Z])/g;f.extend({cache:{},uuid:0,expando:"jQuery"+(f.fn.jquery+Math.random()).replace(/\D/g,""),noData:{embed:!0,object:"clsid:D27CDB6E-AE6D-11cf-96B8-444553540000",applet:!0},hasData:function(a){a=a.nodeType?f.cache[a[f.expando]]:a[f.expando];return!!a&&!m(a)},data:function(a,c,d,e){if(!!f.acceptData(a)){var g,h,i,j=f.expando,k=typeof c=="string",l=a.nodeType,m=l?f.cache:a,n=l?a[j]:a[j]&&j,o=c==="events";if((!n||!m[n]||!o&&!e&&!m[n].data)&&k&&d===b)return;n||(l?a[j]=n=++f.uuid:n=j),m[n]||(m[n]={},l||(m[n].toJSON=f.noop));if(typeof c=="object"||typeof c=="function")e?m[n]=f.extend(m[n],c):m[n].data=f.extend(m[n].data,c);g=h=m[n],e||(h.data||(h.data={}),h=h.data),d!==b&&(h[f.camelCase(c)]=d);if(o&&!h[c])return g.events;k?(i=h[c],i==null&&(i=h[f.camelCase(c)])):i=h;return i}},removeData:function(a,b,c){if(!!f.acceptData(a)){var d,e,g,h=f.expando,i=a.nodeType,j=i?f.cache:a,k=i?a[h]:h;if(!j[k])return;if(b){d=c?j[k]:j[k].data;if(d){f.isArray(b)||(b in d?b=[b]:(b=f.camelCase(b),b in d?b=[b]:b=b.split(" ")));for(e=0,g=b.length;e-1)return!0;return!1},val:function(a){var c,d,e,g=this[0];{if(!!arguments.length){e=f.isFunction(a);return this.each(function(d){var g=f(this),h;if(this.nodeType===1){e?h=a.call(this,d,g.val()):h=a,h==null?h="":typeof h=="number"?h+="":f.isArray(h)&&(h=f.map(h,function(a){return a==null?"":a+""})),c=f.valHooks[this.nodeName.toLowerCase()]||f.valHooks[this.type];if(!c||!("set"in c)||c.set(this,h,"value")===b)this.value=h}})}if(g){c=f.valHooks[g.nodeName.toLowerCase()]||f.valHooks[g.type];if(c&&"get"in c&&(d=c.get(g,"value"))!==b)return d;d=g.value;return typeof d=="string"?d.replace(q,""):d==null?"":d}}}}),f.extend({valHooks:{option:{get:function(a){var b=a.attributes.value;return!b||b.specified?a.value:a.text}},select:{get:function(a){var b,c,d,e,g=a.selectedIndex,h=[],i=a.options,j=a.type==="select-one";if(g<0)return null;c=j?g:0,d=j?g+1:i.length;for(;c=0}),c.length||(a.selectedIndex=-1);return c}}},attrFn:{val:!0,css:!0,html:!0,text:!0,data:!0,width:!0,height:!0,offset:!0},attr:function(a,c,d,e){var g,h,i,j=a.nodeType;if(!!a&&j!==3&&j!==8&&j!==2){if(e&&c in f.attrFn)return f(a)[c](d);if(typeof a.getAttribute=="undefined")return f.prop(a,c,d);i=j!==1||!f.isXMLDoc(a),i&&(c=c.toLowerCase(),h=f.attrHooks[c]||(u.test(c)?x:w));if(d!==b){if(d===null){f.removeAttr(a,c);return}if(h&&"set"in h&&i&&(g=h.set(a,d,c))!==b)return g;a.setAttribute(c,""+d);return d}if(h&&"get"in h&&i&&(g=h.get(a,c))!==null)return g;g=a.getAttribute(c);return g===null?b:g}},removeAttr:function(a,b){var c,d,e,g,h=0;if(b&&a.nodeType===1){d=b.toLowerCase().split(p),g=d.length;for(;h=0}})});var z=/^(?:textarea|input|select)$/i,A=/^([^\.]*)?(?:\.(.+))?$/,B=/\bhover(\.\S+)?\b/,C=/^key/,D=/^(?:mouse|contextmenu)|click/,E=/^(?:focusinfocus|focusoutblur)$/,F=/^(\w*)(?:#([\w\-]+))?(?:\.([\w\-]+))?$/,G=function(a){var b=F.exec(a);b&&(b[1]=(b[1]||"").toLowerCase(),b[3]=b[3]&&new RegExp("(?:^|\\s)"+b[3]+"(?:\\s|$)"));return 
b},H=function(a,b){var c=a.attributes||{};return(!b[1]||a.nodeName.toLowerCase()===b[1])&&(!b[2]||(c.id||{}).value===b[2])&&(!b[3]||b[3].test((c["class"]||{}).value))},I=function(a){return f.event.special.hover?a:a.replace(B,"mouseenter$1 mouseleave$1")}; +f.event={add:function(a,c,d,e,g){var h,i,j,k,l,m,n,o,p,q,r,s;if(!(a.nodeType===3||a.nodeType===8||!c||!d||!(h=f._data(a)))){d.handler&&(p=d,d=p.handler),d.guid||(d.guid=f.guid++),j=h.events,j||(h.events=j={}),i=h.handle,i||(h.handle=i=function(a){return typeof f!="undefined"&&(!a||f.event.triggered!==a.type)?f.event.dispatch.apply(i.elem,arguments):b},i.elem=a),c=f.trim(I(c)).split(" ");for(k=0;k=0&&(h=h.slice(0,-1),k=!0),h.indexOf(".")>=0&&(i=h.split("."),h=i.shift(),i.sort());if((!e||f.event.customEvent[h])&&!f.event.global[h])return;c=typeof c=="object"?c[f.expando]?c:new f.Event(h,c):new f.Event(h),c.type=h,c.isTrigger=!0,c.exclusive=k,c.namespace=i.join("."),c.namespace_re=c.namespace?new RegExp("(^|\\.)"+i.join("\\.(?:.*\\.)?")+"(\\.|$)"):null,o=h.indexOf(":")<0?"on"+h:"";if(!e){j=f.cache;for(l in j)j[l].events&&j[l].events[h]&&f.event.trigger(c,d,j[l].handle.elem,!0);return}c.result=b,c.target||(c.target=e),d=d!=null?f.makeArray(d):[],d.unshift(c),p=f.event.special[h]||{};if(p.trigger&&p.trigger.apply(e,d)===!1)return;r=[[e,p.bindType||h]];if(!g&&!p.noBubble&&!f.isWindow(e)){s=p.delegateType||h,m=E.test(s+h)?e:e.parentNode,n=null;for(;m;m=m.parentNode)r.push([m,s]),n=m;n&&n===e.ownerDocument&&r.push([n.defaultView||n.parentWindow||a,s])}for(l=0;le&&i.push({elem:this,matches:d.slice(e)});for(j=0;j0?this.on(b,null,a,c):this.trigger(b)},f.attrFn&&(f.attrFn[b]=!0),C.test(b)&&(f.event.fixHooks[b]=f.event.keyHooks),D.test(b)&&(f.event.fixHooks[b]=f.event.mouseHooks)}),function(){function x(a,b,c,e,f,g){for(var h=0,i=e.length;h0){k=j;break}}j=j[a]}e[h]=k}}}function w(a,b,c,e,f,g){for(var h=0,i=e.length;h+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g,d="sizcache"+(Math.random()+"").replace(".",""),e=0,g=Object.prototype.toString,h=!1,i=!0,j=/\\/g,k=/\r\n/g,l=/\W/;[0,0].sort(function(){i=!1;return 0});var m=function(b,d,e,f){e=e||[],d=d||c;var h=d;if(d.nodeType!==1&&d.nodeType!==9)return[];if(!b||typeof b!="string")return e;var i,j,k,l,n,q,r,t,u=!0,v=m.isXML(d),w=[],x=b;do{a.exec(""),i=a.exec(x);if(i){x=i[3],w.push(i[1]);if(i[2]){l=i[3];break}}}while(i);if(w.length>1&&p.exec(b))if(w.length===2&&o.relative[w[0]])j=y(w[0]+w[1],d,f);else{j=o.relative[w[0]]?[d]:m(w.shift(),d);while(w.length)b=w.shift(),o.relative[b]&&(b+=w.shift()),j=y(b,j,f)}else{!f&&w.length>1&&d.nodeType===9&&!v&&o.match.ID.test(w[0])&&!o.match.ID.test(w[w.length-1])&&(n=m.find(w.shift(),d,v),d=n.expr?m.filter(n.expr,n.set)[0]:n.set[0]);if(d){n=f?{expr:w.pop(),set:s(f)}:m.find(w.pop(),w.length===1&&(w[0]==="~"||w[0]==="+")&&d.parentNode?d.parentNode:d,v),j=n.expr?m.filter(n.expr,n.set):n.set,w.length>0?k=s(j):u=!1;while(w.length)q=w.pop(),r=q,o.relative[q]?r=w.pop():q="",r==null&&(r=d),o.relative[q](k,r,v)}else k=w=[]}k||(k=j),k||m.error(q||b);if(g.call(k)==="[object Array]")if(!u)e.push.apply(e,k);else if(d&&d.nodeType===1)for(t=0;k[t]!=null;t++)k[t]&&(k[t]===!0||k[t].nodeType===1&&m.contains(d,k[t]))&&e.push(j[t]);else for(t=0;k[t]!=null;t++)k[t]&&k[t].nodeType===1&&e.push(j[t]);else s(k,e);l&&(m(l,h,e,f),m.uniqueSort(e));return e};m.uniqueSort=function(a){if(u){h=i,a.sort(u);if(h)for(var b=1;b0},m.find=function(a,b,c){var d,e,f,g,h,i;if(!a)return[];for(e=0,f=o.order.length;e":function(a,b){var c,d=typeof 
b=="string",e=0,f=a.length;if(d&&!l.test(b)){b=b.toLowerCase();for(;e=0)?c||d.push(h):c&&(b[g]=!1));return!1},ID:function(a){return a[1].replace(j,"")},TAG:function(a,b){return a[1].replace(j,"").toLowerCase()},CHILD:function(a){if(a[1]==="nth"){a[2]||m.error(a[0]),a[2]=a[2].replace(/^\+|\s*/g,"");var b=/(-?)(\d*)(?:n([+\-]?\d*))?/.exec(a[2]==="even"&&"2n"||a[2]==="odd"&&"2n+1"||!/\D/.test(a[2])&&"0n+"+a[2]||a[2]);a[2]=b[1]+(b[2]||1)-0,a[3]=b[3]-0}else a[2]&&m.error(a[0]);a[0]=e++;return a},ATTR:function(a,b,c,d,e,f){var g=a[1]=a[1].replace(j,"");!f&&o.attrMap[g]&&(a[1]=o.attrMap[g]),a[4]=(a[4]||a[5]||"").replace(j,""),a[2]==="~="&&(a[4]=" "+a[4]+" ");return a},PSEUDO:function(b,c,d,e,f){if(b[1]==="not")if((a.exec(b[3])||"").length>1||/^\w/.test(b[3]))b[3]=m(b[3],null,null,c);else{var g=m.filter(b[3],c,d,!0^f);d||e.push.apply(e,g);return!1}else if(o.match.POS.test(b[0])||o.match.CHILD.test(b[0]))return!0;return b},POS:function(a){a.unshift(!0);return a}},filters:{enabled:function(a){return a.disabled===!1&&a.type!=="hidden"},disabled:function(a){return a.disabled===!0},checked:function(a){return a.checked===!0},selected:function(a){a.parentNode&&a.parentNode.selectedIndex;return a.selected===!0},parent:function(a){return!!a.firstChild},empty:function(a){return!a.firstChild},has:function(a,b,c){return!!m(c[3],a).length},header:function(a){return/h\d/i.test(a.nodeName)},text:function(a){var b=a.getAttribute("type"),c=a.type;return a.nodeName.toLowerCase()==="input"&&"text"===c&&(b===c||b===null)},radio:function(a){return a.nodeName.toLowerCase()==="input"&&"radio"===a.type},checkbox:function(a){return a.nodeName.toLowerCase()==="input"&&"checkbox"===a.type},file:function(a){return a.nodeName.toLowerCase()==="input"&&"file"===a.type},password:function(a){return a.nodeName.toLowerCase()==="input"&&"password"===a.type},submit:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"submit"===a.type},image:function(a){return a.nodeName.toLowerCase()==="input"&&"image"===a.type},reset:function(a){var b=a.nodeName.toLowerCase();return(b==="input"||b==="button")&&"reset"===a.type},button:function(a){var b=a.nodeName.toLowerCase();return b==="input"&&"button"===a.type||b==="button"},input:function(a){return/input|select|textarea|button/i.test(a.nodeName)},focus:function(a){return a===a.ownerDocument.activeElement}},setFilters:{first:function(a,b){return b===0},last:function(a,b,c,d){return b===d.length-1},even:function(a,b){return b%2===0},odd:function(a,b){return b%2===1},lt:function(a,b,c){return bc[3]-0},nth:function(a,b,c){return c[3]-0===b},eq:function(a,b,c){return c[3]-0===b}},filter:{PSEUDO:function(a,b,c,d){var e=b[1],f=o.filters[e];if(f)return f(a,c,b,d);if(e==="contains")return(a.textContent||a.innerText||n([a])||"").indexOf(b[3])>=0;if(e==="not"){var g=b[3];for(var h=0,i=g.length;h=0}},ID:function(a,b){return a.nodeType===1&&a.getAttribute("id")===b},TAG:function(a,b){return b==="*"&&a.nodeType===1||!!a.nodeName&&a.nodeName.toLowerCase()===b},CLASS:function(a,b){return(" "+(a.className||a.getAttribute("class"))+" ").indexOf(b)>-1},ATTR:function(a,b){var c=b[1],d=m.attr?m.attr(a,c):o.attrHandle[c]?o.attrHandle[c](a):a[c]!=null?a[c]:a.getAttribute(c),e=d+"",f=b[2],g=b[4];return d==null?f==="!=":!f&&m.attr?d!=null:f==="="?e===g:f==="*="?e.indexOf(g)>=0:f==="~="?(" "+e+" ").indexOf(g)>=0:g?f==="!="?e!==g:f==="^="?e.indexOf(g)===0:f==="$="?e.substr(e.length-g.length)===g:f==="|="?e===g||e.substr(0,g.length+1)===g+"-":!1:e&&d!==!1},POS:function(a,b,c,d){var 
e=b[2],f=o.setFilters[e];if(f)return f(a,c,b,d)}}},p=o.match.POS,q=function(a,b){return"\\"+(b-0+1)};for(var r in o.match)o.match[r]=new RegExp(o.match[r].source+/(?![^\[]*\])(?![^\(]*\))/.source),o.leftMatch[r]=new RegExp(/(^(?:.|\r|\n)*?)/.source+o.match[r].source.replace(/\\(\d+)/g,q));var s=function(a,b){a=Array.prototype.slice.call(a,0);if(b){b.push.apply(b,a);return b}return a};try{Array.prototype.slice.call(c.documentElement.childNodes,0)[0].nodeType}catch(t){s=function(a,b){var c=0,d=b||[];if(g.call(a)==="[object Array]")Array.prototype.push.apply(d,a);else if(typeof a.length=="number")for(var e=a.length;c",e.insertBefore(a,e.firstChild),c.getElementById(d)&&(o.find.ID=function(a,c,d){if(typeof c.getElementById!="undefined"&&!d){var e=c.getElementById(a[1]);return e?e.id===a[1]||typeof e.getAttributeNode!="undefined"&&e.getAttributeNode("id").nodeValue===a[1]?[e]:b:[]}},o.filter.ID=function(a,b){var c=typeof a.getAttributeNode!="undefined"&&a.getAttributeNode("id");return a.nodeType===1&&c&&c.nodeValue===b}),e.removeChild(a),e=a=null}(),function(){var a=c.createElement("div");a.appendChild(c.createComment("")),a.getElementsByTagName("*").length>0&&(o.find.TAG=function(a,b){var c=b.getElementsByTagName(a[1]);if(a[1]==="*"){var d=[];for(var e=0;c[e];e++)c[e].nodeType===1&&d.push(c[e]);c=d}return c}),a.innerHTML="",a.firstChild&&typeof a.firstChild.getAttribute!="undefined"&&a.firstChild.getAttribute("href")!=="#"&&(o.attrHandle.href=function(a){return a.getAttribute("href",2)}),a=null}(),c.querySelectorAll&&function(){var a=m,b=c.createElement("div"),d="__sizzle__";b.innerHTML="

    ";if(!b.querySelectorAll||b.querySelectorAll(".TEST").length!==0){m=function(b,e,f,g){e=e||c;if(!g&&!m.isXML(e)){var h=/^(\w+$)|^\.([\w\-]+$)|^#([\w\-]+$)/.exec(b);if(h&&(e.nodeType===1||e.nodeType===9)){if(h[1])return s(e.getElementsByTagName(b),f);if(h[2]&&o.find.CLASS&&e.getElementsByClassName)return s(e.getElementsByClassName(h[2]),f)}if(e.nodeType===9){if(b==="body"&&e.body)return s([e.body],f);if(h&&h[3]){var i=e.getElementById(h[3]);if(!i||!i.parentNode)return s([],f);if(i.id===h[3])return s([i],f)}try{return s(e.querySelectorAll(b),f)}catch(j){}}else if(e.nodeType===1&&e.nodeName.toLowerCase()!=="object"){var k=e,l=e.getAttribute("id"),n=l||d,p=e.parentNode,q=/^\s*[+~]/.test(b);l?n=n.replace(/'/g,"\\$&"):e.setAttribute("id",n),q&&p&&(e=e.parentNode);try{if(!q||p)return s(e.querySelectorAll("[id='"+n+"'] "+b),f)}catch(r){}finally{l||k.removeAttribute("id")}}}return a(b,e,f,g)};for(var e in a)m[e]=a[e];b=null}}(),function(){var a=c.documentElement,b=a.matchesSelector||a.mozMatchesSelector||a.webkitMatchesSelector||a.msMatchesSelector;if(b){var d=!b.call(c.createElement("div"),"div"),e=!1;try{b.call(c.documentElement,"[test!='']:sizzle")}catch(f){e=!0}m.matchesSelector=function(a,c){c=c.replace(/\=\s*([^'"\]]*)\s*\]/g,"='$1']");if(!m.isXML(a))try{if(e||!o.match.PSEUDO.test(c)&&!/!=/.test(c)){var f=b.call(a,c);if(f||!d||a.document&&a.document.nodeType!==11)return f}}catch(g){}return m(c,null,null,[a]).length>0}}}(),function(){var a=c.createElement("div");a.innerHTML="
    ";if(!!a.getElementsByClassName&&a.getElementsByClassName("e").length!==0){a.lastChild.className="e";if(a.getElementsByClassName("e").length===1)return;o.order.splice(1,0,"CLASS"),o.find.CLASS=function(a,b,c){if(typeof b.getElementsByClassName!="undefined"&&!c)return b.getElementsByClassName(a[1])},a=null}}(),c.documentElement.contains?m.contains=function(a,b){return a!==b&&(a.contains?a.contains(b):!0)}:c.documentElement.compareDocumentPosition?m.contains=function(a,b){return!!(a.compareDocumentPosition(b)&16)}:m.contains=function(){return!1},m.isXML=function(a){var b=(a?a.ownerDocument||a:0).documentElement;return b?b.nodeName!=="HTML":!1};var y=function(a,b,c){var d,e=[],f="",g=b.nodeType?[b]:b;while(d=o.match.PSEUDO.exec(a))f+=d[0],a=a.replace(o.match.PSEUDO,"");a=o.relative[a]?a+"*":a;for(var h=0,i=g.length;h0)for(h=g;h=0:f.filter(a,this).length>0:this.filter(a).length>0)},closest:function(a,b){var c=[],d,e,g=this[0];if(f.isArray(a)){var h=1;while(g&&g.ownerDocument&&g!==b){for(d=0;d-1:f.find.matchesSelector(g,a)){c.push(g);break}g=g.parentNode;if(!g||!g.ownerDocument||g===b||g.nodeType===11)break}}c=c.length>1?f.unique(c):c;return this.pushStack(c,"closest",a)},index:function(a){if(!a)return this[0]&&this[0].parentNode?this.prevAll().length:-1;if(typeof a=="string")return f.inArray(this[0],f(a));return f.inArray(a.jquery?a[0]:a,this)},add:function(a,b){var c=typeof a=="string"?f(a,b):f.makeArray(a&&a.nodeType?[a]:a),d=f.merge(this.get(),c);return this.pushStack(S(c[0])||S(d[0])?d:f.unique(d))},andSelf:function(){return this.add(this.prevObject)}}),f.each({parent:function(a){var b=a.parentNode;return b&&b.nodeType!==11?b:null},parents:function(a){return f.dir(a,"parentNode")},parentsUntil:function(a,b,c){return f.dir(a,"parentNode",c)},next:function(a){return f.nth(a,2,"nextSibling")},prev:function(a){return f.nth(a,2,"previousSibling")},nextAll:function(a){return f.dir(a,"nextSibling")},prevAll:function(a){return f.dir(a,"previousSibling")},nextUntil:function(a,b,c){return f.dir(a,"nextSibling",c)},prevUntil:function(a,b,c){return f.dir(a,"previousSibling",c)},siblings:function(a){return f.sibling(a.parentNode.firstChild,a)},children:function(a){return f.sibling(a.firstChild)},contents:function(a){return f.nodeName(a,"iframe")?a.contentDocument||a.contentWindow.document:f.makeArray(a.childNodes)}},function(a,b){f.fn[a]=function(c,d){var e=f.map(this,b,c);L.test(a)||(d=c),d&&typeof d=="string"&&(e=f.filter(d,e)),e=this.length>1&&!R[a]?f.unique(e):e,(this.length>1||N.test(d))&&M.test(a)&&(e=e.reverse());return this.pushStack(e,a,P.call(arguments).join(","))}}),f.extend({filter:function(a,b,c){c&&(a=":not("+a+")");return b.length===1?f.find.matchesSelector(b[0],a)?[b[0]]:[]:f.find.matches(a,b)},dir:function(a,c,d){var e=[],g=a[c];while(g&&g.nodeType!==9&&(d===b||g.nodeType!==1||!f(g).is(d)))g.nodeType===1&&e.push(g),g=g[c];return e},nth:function(a,b,c,d){b=b||1;var e=0;for(;a;a=a[c])if(a.nodeType===1&&++e===b)break;return a},sibling:function(a,b){var c=[];for(;a;a=a.nextSibling)a.nodeType===1&&a!==b&&c.push(a);return c}});var V="abbr|article|aside|audio|canvas|datalist|details|figcaption|figure|footer|header|hgroup|mark|meter|nav|output|progress|section|summary|time|video",W=/ jQuery\d+="(?:\d+|null)"/g,X=/^\s+/,Y=/<(?!area|br|col|embed|hr|img|input|link|meta|param)(([\w:]+)[^>]*)\/>/ig,Z=/<([\w:]+)/,$=/",""],legend:[1,"
    ","
    "],thead:[1,"","
    "],tr:[2,"","
    "],td:[3,"","
    "],col:[2,"","
    "],area:[1,"",""],_default:[0,"",""]},bh=U(c);bg.optgroup=bg.option,bg.tbody=bg.tfoot=bg.colgroup=bg.caption=bg.thead,bg.th=bg.td,f.support.htmlSerialize||(bg._default=[1,"div
    ","
    "]),f.fn.extend({text:function(a){if(f.isFunction(a))return this.each(function(b){var c=f(this);c.text(a.call(this,b,c.text()))});if(typeof a!="object"&&a!==b)return this.empty().append((this[0]&&this[0].ownerDocument||c).createTextNode(a));return f.text(this)},wrapAll:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapAll(a.call(this,b))});if(this[0]){var b=f(a,this[0].ownerDocument).eq(0).clone(!0);this[0].parentNode&&b.insertBefore(this[0]),b.map(function(){var a=this;while(a.firstChild&&a.firstChild.nodeType===1)a=a.firstChild;return a}).append(this)}return this},wrapInner:function(a){if(f.isFunction(a))return this.each(function(b){f(this).wrapInner(a.call(this,b))});return this.each(function(){var b=f(this),c=b.contents();c.length?c.wrapAll(a):b.append(a)})},wrap:function(a){var b=f.isFunction(a);return this.each(function(c){f(this).wrapAll(b?a.call(this,c):a)})},unwrap:function(){return this.parent().each(function(){f.nodeName(this,"body")||f(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.appendChild(a)})},prepend:function(){return this.domManip(arguments,!0,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this)});if(arguments.length){var a=f.clean(arguments);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,!1,function(a){this.parentNode.insertBefore(a,this.nextSibling)});if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,f.clean(arguments));return a}},remove:function(a,b){for(var c=0,d;(d=this[c])!=null;c++)if(!a||f.filter(a,[d]).length)!b&&d.nodeType===1&&(f.cleanData(d.getElementsByTagName("*")),f.cleanData([d])),d.parentNode&&d.parentNode.removeChild(d);return this},empty:function() +{for(var a=0,b;(b=this[a])!=null;a++){b.nodeType===1&&f.cleanData(b.getElementsByTagName("*"));while(b.firstChild)b.removeChild(b.firstChild)}return this},clone:function(a,b){a=a==null?!1:a,b=b==null?a:b;return this.map(function(){return f.clone(this,a,b)})},html:function(a){if(a===b)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(W,""):null;if(typeof a=="string"&&!ba.test(a)&&(f.support.leadingWhitespace||!X.test(a))&&!bg[(Z.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Y,"<$1>");try{for(var c=0,d=this.length;c1&&l0?this.clone(!0):this).get();f(e[h])[b](j),d=d.concat(j)}return this.pushStack(d,a,e.selector)}}),f.extend({clone:function(a,b,c){var d,e,g,h=f.support.html5Clone||!bc.test("<"+a.nodeName)?a.cloneNode(!0):bo(a);if((!f.support.noCloneEvent||!f.support.noCloneChecked)&&(a.nodeType===1||a.nodeType===11)&&!f.isXMLDoc(a)){bk(a,h),d=bl(a),e=bl(h);for(g=0;d[g];++g)e[g]&&bk(d[g],e[g])}if(b){bj(a,h);if(c){d=bl(a),e=bl(h);for(g=0;d[g];++g)bj(d[g],e[g])}}d=e=null;return h},clean:function(a,b,d,e){var g;b=b||c,typeof b.createElement=="undefined"&&(b=b.ownerDocument||b[0]&&b[0].ownerDocument||c);var h=[],i;for(var j=0,k;(k=a[j])!=null;j++){typeof k=="number"&&(k+="");if(!k)continue;if(typeof k=="string")if(!_.test(k))k=b.createTextNode(k);else{k=k.replace(Y,"<$1>");var l=(Z.exec(k)||["",""])[1].toLowerCase(),m=bg[l]||bg._default,n=m[0],o=b.createElement("div");b===c?bh.appendChild(o):U(b).appendChild(o),o.innerHTML=m[1]+k+m[2];while(n--)o=o.lastChild;if(!f.support.tbody){var 
p=$.test(k),q=l==="table"&&!p?o.firstChild&&o.firstChild.childNodes:m[1]===""&&!p?o.childNodes:[];for(i=q.length-1;i>=0;--i)f.nodeName(q[i],"tbody")&&!q[i].childNodes.length&&q[i].parentNode.removeChild(q[i])}!f.support.leadingWhitespace&&X.test(k)&&o.insertBefore(b.createTextNode(X.exec(k)[0]),o.firstChild),k=o.childNodes}var r;if(!f.support.appendChecked)if(k[0]&&typeof (r=k.length)=="number")for(i=0;i=0)return b+"px"}}}),f.support.opacity||(f.cssHooks.opacity={get:function(a,b){return br.test((b&&a.currentStyle?a.currentStyle.filter:a.style.filter)||"")?parseFloat(RegExp.$1)/100+"":b?"1":""},set:function(a,b){var c=a.style,d=a.currentStyle,e=f.isNumeric(b)?"alpha(opacity="+b*100+")":"",g=d&&d.filter||c.filter||"";c.zoom=1;if(b>=1&&f.trim(g.replace(bq,""))===""){c.removeAttribute("filter");if(d&&!d.filter)return}c.filter=bq.test(g)?g.replace(bq,e):g+" "+e}}),f(function(){f.support.reliableMarginRight||(f.cssHooks.marginRight={get:function(a,b){var c;f.swap(a,{display:"inline-block"},function(){b?c=bz(a,"margin-right","marginRight"):c=a.style.marginRight});return c}})}),c.defaultView&&c.defaultView.getComputedStyle&&(bA=function(a,b){var c,d,e;b=b.replace(bs,"-$1").toLowerCase(),(d=a.ownerDocument.defaultView)&&(e=d.getComputedStyle(a,null))&&(c=e.getPropertyValue(b),c===""&&!f.contains(a.ownerDocument.documentElement,a)&&(c=f.style(a,b)));return c}),c.documentElement.currentStyle&&(bB=function(a,b){var c,d,e,f=a.currentStyle&&a.currentStyle[b],g=a.style;f===null&&g&&(e=g[b])&&(f=e),!bt.test(f)&&bu.test(f)&&(c=g.left,d=a.runtimeStyle&&a.runtimeStyle.left,d&&(a.runtimeStyle.left=a.currentStyle.left),g.left=b==="fontSize"?"1em":f||0,f=g.pixelLeft+"px",g.left=c,d&&(a.runtimeStyle.left=d));return f===""?"auto":f}),bz=bA||bB,f.expr&&f.expr.filters&&(f.expr.filters.hidden=function(a){var b=a.offsetWidth,c=a.offsetHeight;return b===0&&c===0||!f.support.reliableHiddenOffsets&&(a.style&&a.style.display||f.css(a,"display"))==="none"},f.expr.filters.visible=function(a){return!f.expr.filters.hidden(a)});var bD=/%20/g,bE=/\[\]$/,bF=/\r?\n/g,bG=/#.*$/,bH=/^(.*?):[ \t]*([^\r\n]*)\r?$/mg,bI=/^(?:color|date|datetime|datetime-local|email|hidden|month|number|password|range|search|tel|text|time|url|week)$/i,bJ=/^(?:about|app|app\-storage|.+\-extension|file|res|widget):$/,bK=/^(?:GET|HEAD)$/,bL=/^\/\//,bM=/\?/,bN=/)<[^<]*)*<\/script>/gi,bO=/^(?:select|textarea)/i,bP=/\s+/,bQ=/([?&])_=[^&]*/,bR=/^([\w\+\.\-]+:)(?:\/\/([^\/?#:]*)(?::(\d+))?)?/,bS=f.fn.load,bT={},bU={},bV,bW,bX=["*/"]+["*"];try{bV=e.href}catch(bY){bV=c.createElement("a"),bV.href="",bV=bV.href}bW=bR.exec(bV.toLowerCase())||[],f.fn.extend({load:function(a,c,d){if(typeof a!="string"&&bS)return bS.apply(this,arguments);if(!this.length)return this;var e=a.indexOf(" ");if(e>=0){var g=a.slice(e,a.length);a=a.slice(0,e)}var h="GET";c&&(f.isFunction(c)?(d=c,c=b):typeof c=="object"&&(c=f.param(c,f.ajaxSettings.traditional),h="POST"));var i=this;f.ajax({url:a,type:h,dataType:"html",data:c,complete:function(a,b,c){c=a.responseText,a.isResolved()&&(a.done(function(a){c=a}),i.html(g?f("
    ").append(c.replace(bN,"")).find(g):c)),d&&i.each(d,[c,b,a])}});return this},serialize:function(){return f.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?f.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||bO.test(this.nodeName)||bI.test(this.type))}).map(function(a,b){var c=f(this).val();return c==null?null:f.isArray(c)?f.map(c,function(a,c){return{name:b.name,value:a.replace(bF,"\r\n")}}):{name:b.name,value:c.replace(bF,"\r\n")}}).get()}}),f.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "),function(a,b){f.fn[b]=function(a){return this.on(b,a)}}),f.each(["get","post"],function(a,c){f[c]=function(a,d,e,g){f.isFunction(d)&&(g=g||e,e=d,d=b);return f.ajax({type:c,url:a,data:d,success:e,dataType:g})}}),f.extend({getScript:function(a,c){return f.get(a,b,c,"script")},getJSON:function(a,b,c){return f.get(a,b,c,"json")},ajaxSetup:function(a,b){b?b_(a,f.ajaxSettings):(b=a,a=f.ajaxSettings),b_(a,b);return a},ajaxSettings:{url:bV,isLocal:bJ.test(bW[1]),global:!0,type:"GET",contentType:"application/x-www-form-urlencoded",processData:!0,async:!0,accepts:{xml:"application/xml, text/xml",html:"text/html",text:"text/plain",json:"application/json, text/javascript","*":bX},contents:{xml:/xml/,html:/html/,json:/json/},responseFields:{xml:"responseXML",text:"responseText"},converters:{"* text":a.String,"text html":!0,"text json":f.parseJSON,"text xml":f.parseXML},flatOptions:{context:!0,url:!0}},ajaxPrefilter:bZ(bT),ajaxTransport:bZ(bU),ajax:function(a,c){function w(a,c,l,m){if(s!==2){s=2,q&&clearTimeout(q),p=b,n=m||"",v.readyState=a>0?4:0;var o,r,u,w=c,x=l?cb(d,v,l):b,y,z;if(a>=200&&a<300||a===304){if(d.ifModified){if(y=v.getResponseHeader("Last-Modified"))f.lastModified[k]=y;if(z=v.getResponseHeader("Etag"))f.etag[k]=z}if(a===304)w="notmodified",o=!0;else try{r=cc(d,x),w="success",o=!0}catch(A){w="parsererror",u=A}}else{u=w;if(!w||a)w="error",a<0&&(a=0)}v.status=a,v.statusText=""+(c||w),o?h.resolveWith(e,[r,w,v]):h.rejectWith(e,[v,w,u]),v.statusCode(j),j=b,t&&g.trigger("ajax"+(o?"Success":"Error"),[v,d,o?r:u]),i.fireWith(e,[v,w]),t&&(g.trigger("ajaxComplete",[v,d]),--f.active||f.event.trigger("ajaxStop"))}}typeof a=="object"&&(c=a,a=b),c=c||{};var d=f.ajaxSetup({},c),e=d.context||d,g=e!==d&&(e.nodeType||e instanceof f)?f(e):f.event,h=f.Deferred(),i=f.Callbacks("once memory"),j=d.statusCode||{},k,l={},m={},n,o,p,q,r,s=0,t,u,v={readyState:0,setRequestHeader:function(a,b){if(!s){var c=a.toLowerCase();a=m[c]=m[c]||a,l[a]=b}return this},getAllResponseHeaders:function(){return s===2?n:null},getResponseHeader:function(a){var c;if(s===2){if(!o){o={};while(c=bH.exec(n))o[c[1].toLowerCase()]=c[2]}c=o[a.toLowerCase()]}return c===b?null:c},overrideMimeType:function(a){s||(d.mimeType=a);return this},abort:function(a){a=a||"abort",p&&p.abort(a),w(0,a);return this}};h.promise(v),v.success=v.done,v.error=v.fail,v.complete=i.add,v.statusCode=function(a){if(a){var b;if(s<2)for(b in a)j[b]=[j[b],a[b]];else b=a[v.status],v.then(b,b)}return this},d.url=((a||d.url)+"").replace(bG,"").replace(bL,bW[1]+"//"),d.dataTypes=f.trim(d.dataType||"*").toLowerCase().split(bP),d.crossDomain==null&&(r=bR.exec(d.url.toLowerCase()),d.crossDomain=!(!r||r[1]==bW[1]&&r[2]==bW[2]&&(r[3]||(r[1]==="http:"?80:443))==(bW[3]||(bW[1]==="http:"?80:443)))),d.data&&d.processData&&typeof 
d.data!="string"&&(d.data=f.param(d.data,d.traditional)),b$(bT,d,c,v);if(s===2)return!1;t=d.global,d.type=d.type.toUpperCase(),d.hasContent=!bK.test(d.type),t&&f.active++===0&&f.event.trigger("ajaxStart");if(!d.hasContent){d.data&&(d.url+=(bM.test(d.url)?"&":"?")+d.data,delete d.data),k=d.url;if(d.cache===!1){var x=f.now(),y=d.url.replace(bQ,"$1_="+x);d.url=y+(y===d.url?(bM.test(d.url)?"&":"?")+"_="+x:"")}}(d.data&&d.hasContent&&d.contentType!==!1||c.contentType)&&v.setRequestHeader("Content-Type",d.contentType),d.ifModified&&(k=k||d.url,f.lastModified[k]&&v.setRequestHeader("If-Modified-Since",f.lastModified[k]),f.etag[k]&&v.setRequestHeader("If-None-Match",f.etag[k])),v.setRequestHeader("Accept",d.dataTypes[0]&&d.accepts[d.dataTypes[0]]?d.accepts[d.dataTypes[0]]+(d.dataTypes[0]!=="*"?", "+bX+"; q=0.01":""):d.accepts["*"]);for(u in d.headers)v.setRequestHeader(u,d.headers[u]);if(d.beforeSend&&(d.beforeSend.call(e,v,d)===!1||s===2)){v.abort();return!1}for(u in{success:1,error:1,complete:1})v[u](d[u]);p=b$(bU,d,c,v);if(!p)w(-1,"No Transport");else{v.readyState=1,t&&g.trigger("ajaxSend",[v,d]),d.async&&d.timeout>0&&(q=setTimeout(function(){v.abort("timeout")},d.timeout));try{s=1,p.send(l,w)}catch(z){if(s<2)w(-1,z);else throw z}}return v},param:function(a,c){var d=[],e=function(a,b){b=f.isFunction(b)?b():b,d[d.length]=encodeURIComponent(a)+"="+encodeURIComponent(b)};c===b&&(c=f.ajaxSettings.traditional);if(f.isArray(a)||a.jquery&&!f.isPlainObject(a))f.each(a,function(){e(this.name,this.value)});else for(var g in a)ca(g,a[g],c,e);return d.join("&").replace(bD,"+")}}),f.extend({active:0,lastModified:{},etag:{}});var cd=f.now(),ce=/(\=)\?(&|$)|\?\?/i;f.ajaxSetup({jsonp:"callback",jsonpCallback:function(){return f.expando+"_"+cd++}}),f.ajaxPrefilter("json jsonp",function(b,c,d){var e=b.contentType==="application/x-www-form-urlencoded"&&typeof b.data=="string";if(b.dataTypes[0]==="jsonp"||b.jsonp!==!1&&(ce.test(b.url)||e&&ce.test(b.data))){var g,h=b.jsonpCallback=f.isFunction(b.jsonpCallback)?b.jsonpCallback():b.jsonpCallback,i=a[h],j=b.url,k=b.data,l="$1"+h+"$2";b.jsonp!==!1&&(j=j.replace(ce,l),b.url===j&&(e&&(k=k.replace(ce,l)),b.data===k&&(j+=(/\?/.test(j)?"&":"?")+b.jsonp+"="+h))),b.url=j,b.data=k,a[h]=function(a){g=[a]},d.always(function(){a[h]=i,g&&f.isFunction(i)&&a[h](g[0])}),b.converters["script json"]=function(){g||f.error(h+" was not called");return g[0]},b.dataTypes[0]="json";return"script"}}),f.ajaxSetup({accepts:{script:"text/javascript, application/javascript, application/ecmascript, application/x-ecmascript"},contents:{script:/javascript|ecmascript/},converters:{"text script":function(a){f.globalEval(a);return a}}}),f.ajaxPrefilter("script",function(a){a.cache===b&&(a.cache=!1),a.crossDomain&&(a.type="GET",a.global=!1)}),f.ajaxTransport("script",function(a){if(a.crossDomain){var d,e=c.head||c.getElementsByTagName("head")[0]||c.documentElement;return{send:function(f,g){d=c.createElement("script"),d.async="async",a.scriptCharset&&(d.charset=a.scriptCharset),d.src=a.url,d.onload=d.onreadystatechange=function(a,c){if(c||!d.readyState||/loaded|complete/.test(d.readyState))d.onload=d.onreadystatechange=null,e&&d.parentNode&&e.removeChild(d),d=b,c||g(200,"success")},e.insertBefore(d,e.firstChild)},abort:function(){d&&d.onload(0,1)}}}});var cf=a.ActiveXObject?function(){for(var a in ch)ch[a](0,1)}:!1,cg=0,ch;f.ajaxSettings.xhr=a.ActiveXObject?function(){return!this.isLocal&&ci()||cj()}:ci,function(a){f.extend(f.support,{ajax:!!a,cors:!!a&&"withCredentials"in 
a})}(f.ajaxSettings.xhr()),f.support.ajax&&f.ajaxTransport(function(c){if(!c.crossDomain||f.support.cors){var d;return{send:function(e,g){var h=c.xhr(),i,j;c.username?h.open(c.type,c.url,c.async,c.username,c.password):h.open(c.type,c.url,c.async);if(c.xhrFields)for(j in c.xhrFields)h[j]=c.xhrFields[j];c.mimeType&&h.overrideMimeType&&h.overrideMimeType(c.mimeType),!c.crossDomain&&!e["X-Requested-With"]&&(e["X-Requested-With"]="XMLHttpRequest");try{for(j in e)h.setRequestHeader(j,e[j])}catch(k){}h.send(c.hasContent&&c.data||null),d=function(a,e){var j,k,l,m,n;try{if(d&&(e||h.readyState===4)){d=b,i&&(h.onreadystatechange=f.noop,cf&&delete ch[i]);if(e)h.readyState!==4&&h.abort();else{j=h.status,l=h.getAllResponseHeaders(),m={},n=h.responseXML,n&&n.documentElement&&(m.xml=n),m.text=h.responseText;try{k=h.statusText}catch(o){k=""}!j&&c.isLocal&&!c.crossDomain?j=m.text?200:404:j===1223&&(j=204)}}}catch(p){e||g(-1,p)}m&&g(j,k,m,l)},!c.async||h.readyState===4?d():(i=++cg,cf&&(ch||(ch={},f(a).unload(cf)),ch[i]=d),h.onreadystatechange=d)},abort:function(){d&&d(0,1)}}}});var ck={},cl,cm,cn=/^(?:toggle|show|hide)$/,co=/^([+\-]=)?([\d+.\-]+)([a-z%]*)$/i,cp,cq=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]],cr;f.fn.extend({show:function(a,b,c){var d,e;if(a||a===0)return this.animate(cu("show",3),a,b,c);for(var g=0,h=this.length;g=i.duration+this.startTime){this.now=this.end,this.pos=this.state=1,this.update(),i.animatedProperties[this.prop]=!0;for(b in i.animatedProperties)i.animatedProperties[b]!==!0&&(g=!1);if(g){i.overflow!=null&&!f.support.shrinkWrapBlocks&&f.each(["","X","Y"],function(a,b){h.style["overflow"+b]=i.overflow[a]}),i.hide&&f(h).hide();if(i.hide||i.show)for(b in i.animatedProperties)f.style(h,b,i.orig[b]),f.removeData(h,"fxshow"+b,!0),f.removeData(h,"toggle"+b,!0);d=i.complete,d&&(i.complete=!1,d.call(h))}return!1}i.duration==Infinity?this.now=e:(c=e-this.startTime,this.state=c/i.duration,this.pos=f.easing[i.animatedProperties[this.prop]](this.state,c,0,1,i.duration),this.now=this.start+(this.end-this.start)*this.pos),this.update();return!0}},f.extend(f.fx,{tick:function(){var a,b=f.timers,c=0;for(;c-1,k={},l={},m,n;j?(l=e.position(),m=l.top,n=l.left):(m=parseFloat(h)||0,n=parseFloat(i)||0),f.isFunction(b)&&(b=b.call(a,c,g)),b.top!=null&&(k.top=b.top-g.top+m),b.left!=null&&(k.left=b.left-g.left+n),"using"in b?b.using.call(a,k):e.css(k)}},f.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),c=this.offset(),d=cx.test(b[0].nodeName)?{top:0,left:0}:b.offset();c.top-=parseFloat(f.css(a,"marginTop"))||0,c.left-=parseFloat(f.css(a,"marginLeft"))||0,d.top+=parseFloat(f.css(b[0],"borderTopWidth"))||0,d.left+=parseFloat(f.css(b[0],"borderLeftWidth"))||0;return{top:c.top-d.top,left:c.left-d.left}},offsetParent:function(){return this.map(function(){var a=this.offsetParent||c.body;while(a&&!cx.test(a.nodeName)&&f.css(a,"position")==="static")a=a.offsetParent;return a})}}),f.each(["Left","Top"],function(a,c){var d="scroll"+c;f.fn[d]=function(c){var e,g;if(c===b){e=this[0];if(!e)return null;g=cy(e);return g?"pageXOffset"in g?g[a?"pageYOffset":"pageXOffset"]:f.support.boxModel&&g.document.documentElement[d]||g.document.body[d]:e[d]}return this.each(function(){g=cy(this),g?g.scrollTo(a?f(g).scrollLeft():c,a?c:f(g).scrollTop()):this[d]=c})}}),f.each(["Height","Width"],function(a,c){var d=c.toLowerCase();f.fn["inner"+c]=function(){var a=this[0];return 
a?a.style?parseFloat(f.css(a,d,"padding")):this[d]():null},f.fn["outer"+c]=function(a){var b=this[0];return b?b.style?parseFloat(f.css(b,d,a?"margin":"border")):this[d]():null},f.fn[d]=function(a){var e=this[0];if(!e)return a==null?null:this;if(f.isFunction(a))return this.each(function(b){var c=f(this);c[d](a.call(this,b,c[d]()))});if(f.isWindow(e)){var g=e.document.documentElement["client"+c],h=e.document.body;return e.document.compatMode==="CSS1Compat"&&g||h&&h["client"+c]||g}if(e.nodeType===9)return Math.max(e.documentElement["client"+c],e.body["scroll"+c],e.documentElement["scroll"+c],e.body["offset"+c],e.documentElement["offset"+c]);if(a===b){var i=f.css(e,d),j=parseFloat(i);return f.isNumeric(j)?j:i}return this.css(d,typeof a=="string"?a:a+"px")}}),a.jQuery=a.$=f,typeof define=="function"&&define.amd&&define.amd.jQuery&&define("jquery",[],function(){return f})})(window); \ No newline at end of file diff --git a/public_html/visual_demo/lib/json2.js b/public_html/visual_demo/lib/json2.js new file mode 100644 index 0000000..3b0c872 --- /dev/null +++ b/public_html/visual_demo/lib/json2.js @@ -0,0 +1,487 @@ +/* + json2.js + 2011-10-19 + + Public Domain. + + NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. + + See http://www.JSON.org/js.html + + + This code should be minified before deployment. + See http://javascript.crockford.com/jsmin.html + + USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO + NOT CONTROL. + + + This file creates a global JSON object containing two methods: stringify + and parse. + + JSON.stringify(value, replacer, space) + value any JavaScript value, usually an object or array. + + replacer an optional parameter that determines how object + values are stringified for objects. It can be a + function or an array of strings. + + space an optional parameter that specifies the indentation + of nested structures. If it is omitted, the text will + be packed without extra whitespace. If it is a number, + it will specify the number of spaces to indent at each + level. If it is a string (such as '\t' or ' '), + it contains the characters used to indent at each level. + + This method produces a JSON text from a JavaScript value. + + When an object value is found, if the object contains a toJSON + method, its toJSON method will be called and the result will be + stringified. A toJSON method does not serialize: it returns the + value represented by the name/value pair that should be serialized, + or undefined if nothing should be serialized. The toJSON method + will be passed the key associated with the value, and this will be + bound to the value + + For example, this would serialize Dates as ISO strings. + + Date.prototype.toJSON = function (key) { + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + return this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z'; + }; + + You can provide an optional replacer method. It will be passed the + key and value of each member, with this bound to the containing + object. The value that is returned from your method will be + serialized. If your method returns undefined, then the member will + be excluded from the serialization. + + If the replacer parameter is an array of strings, then it will be + used to select the members to be serialized. 
It filters the results + such that only members with keys listed in the replacer array are + stringified. + + Values that do not have JSON representations, such as undefined or + functions, will not be serialized. Such values in objects will be + dropped; in arrays they will be replaced with null. You can use + a replacer function to replace those with JSON values. + JSON.stringify(undefined) returns undefined. + + The optional space parameter produces a stringification of the + value that is filled with line breaks and indentation to make it + easier to read. + + If the space parameter is a non-empty string, then that string will + be used for indentation. If the space parameter is a number, then + the indentation will be that many spaces. + + Example: + + text = JSON.stringify(['e', {pluribus: 'unum'}]); + // text is '["e",{"pluribus":"unum"}]' + + + text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); + // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' + + text = JSON.stringify([new Date()], function (key, value) { + return this[key] instanceof Date ? + 'Date(' + this[key] + ')' : value; + }); + // text is '["Date(---current time---)"]' + + + JSON.parse(text, reviver) + This method parses a JSON text to produce an object or array. + It can throw a SyntaxError exception. + + The optional reviver parameter is a function that can filter and + transform the results. It receives each of the keys and values, + and its return value is used instead of the original value. + If it returns what it received, then the structure is not modified. + If it returns undefined then the member is deleted. + + Example: + + // Parse the text. Values that look like ISO date strings will + // be converted to Date objects. + + myData = JSON.parse(text, function (key, value) { + var a; + if (typeof value === 'string') { + a = +/^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); + if (a) { + return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], + +a[5], +a[6])); + } + } + return value; + }); + + myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { + var d; + if (typeof value === 'string' && + value.slice(0, 5) === 'Date(' && + value.slice(-1) === ')') { + d = new Date(value.slice(5, -1)); + if (d) { + return d; + } + } + return value; + }); + + + This is a reference implementation. You are free to copy, modify, or + redistribute. +*/ + +/*jslint evil: true, regexp: true */ + +/*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, + call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, + getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, + lastIndex, length, parse, prototype, push, replace, slice, stringify, + test, toJSON, toString, valueOf +*/ + + +// Create a JSON object only if one does not already exist. We create the +// methods in a closure to avoid creating global variables. + +var JSON; +if (!JSON) { + JSON = {}; +} + +(function () { + 'use strict'; + + function f(n) { + // Format integers to have at least two digits. + return n < 10 ? '0' + n : n; + } + + if (typeof Date.prototype.toJSON !== 'function') { + + Date.prototype.toJSON = function (key) { + + return isFinite(this.valueOf()) + ? 
this.getUTCFullYear() + '-' + + f(this.getUTCMonth() + 1) + '-' + + f(this.getUTCDate()) + 'T' + + f(this.getUTCHours()) + ':' + + f(this.getUTCMinutes()) + ':' + + f(this.getUTCSeconds()) + 'Z' + : null; + }; + + String.prototype.toJSON = + Number.prototype.toJSON = + Boolean.prototype.toJSON = function (key) { + return this.valueOf(); + }; + } + + var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, + gap, + indent, + meta = { // table of character substitutions + '\b': '\\b', + '\t': '\\t', + '\n': '\\n', + '\f': '\\f', + '\r': '\\r', + '"' : '\\"', + '\\': '\\\\' + }, + rep; + + + function quote(string) { + +// If the string contains no control characters, no quote characters, and no +// backslash characters, then we can safely slap some quotes around it. +// Otherwise we must also replace the offending characters with safe escape +// sequences. + + escapable.lastIndex = 0; + return escapable.test(string) ? '"' + string.replace(escapable, function (a) { + var c = meta[a]; + return typeof c === 'string' + ? c + : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }) + '"' : '"' + string + '"'; + } + + + function str(key, holder) { + +// Produce a string from holder[key]. + + var i, // The loop counter. + k, // The member key. + v, // The member value. + length, + mind = gap, + partial, + value = holder[key]; + +// If the value has a toJSON method, call it to obtain a replacement value. + + if (value && typeof value === 'object' && + typeof value.toJSON === 'function') { + value = value.toJSON(key); + } + +// If we were called with a replacer function, then call the replacer to +// obtain a replacement value. + + if (typeof rep === 'function') { + value = rep.call(holder, key, value); + } + +// What happens next depends on the value's type. + + switch (typeof value) { + case 'string': + return quote(value); + + case 'number': + +// JSON numbers must be finite. Encode non-finite numbers as null. + + return isFinite(value) ? String(value) : 'null'; + + case 'boolean': + case 'null': + +// If the value is a boolean or null, convert it to a string. Note: +// typeof null does not produce 'null'. The case is included here in +// the remote chance that this gets fixed someday. + + return String(value); + +// If the type is 'object', we might be dealing with an object or an array or +// null. + + case 'object': + +// Due to a specification blunder in ECMAScript, typeof null is 'object', +// so watch out for that case. + + if (!value) { + return 'null'; + } + +// Make an array to hold the partial results of stringifying this object value. + + gap += indent; + partial = []; + +// Is the value an array? + + if (Object.prototype.toString.apply(value) === '[object Array]') { + +// The value is an array. Stringify every element. Use null as a placeholder +// for non-JSON values. + + length = value.length; + for (i = 0; i < length; i += 1) { + partial[i] = str(i, value) || 'null'; + } + +// Join all of the elements together, separated with commas, and wrap them in +// brackets. + + v = partial.length === 0 + ? '[]' + : gap + ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' + : '[' + partial.join(',') + ']'; + gap = mind; + return v; + } + +// If the replacer is an array, use it to select the members to be stringified. 
+ + if (rep && typeof rep === 'object') { + length = rep.length; + for (i = 0; i < length; i += 1) { + if (typeof rep[i] === 'string') { + k = rep[i]; + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } else { + +// Otherwise, iterate through all of the keys in the object. + + for (k in value) { + if (Object.prototype.hasOwnProperty.call(value, k)) { + v = str(k, value); + if (v) { + partial.push(quote(k) + (gap ? ': ' : ':') + v); + } + } + } + } + +// Join all of the member texts together, separated with commas, +// and wrap them in braces. + + v = partial.length === 0 + ? '{}' + : gap + ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' + : '{' + partial.join(',') + '}'; + gap = mind; + return v; + } + } + +// If the JSON object does not yet have a stringify method, give it one. + + if (typeof JSON.stringify !== 'function') { + JSON.stringify = function (value, replacer, space) { + +// The stringify method takes a value and an optional replacer, and an optional +// space parameter, and returns a JSON text. The replacer can be a function +// that can replace values, or an array of strings that will select the keys. +// A default replacer method can be provided. Use of the space parameter can +// produce text that is more easily readable. + + var i; + gap = ''; + indent = ''; + +// If the space parameter is a number, make an indent string containing that +// many spaces. + + if (typeof space === 'number') { + for (i = 0; i < space; i += 1) { + indent += ' '; + } + +// If the space parameter is a string, it will be used as the indent string. + + } else if (typeof space === 'string') { + indent = space; + } + +// If there is a replacer, it must be a function or an array. +// Otherwise, throw an error. + + rep = replacer; + if (replacer && typeof replacer !== 'function' && + (typeof replacer !== 'object' || + typeof replacer.length !== 'number')) { + throw new Error('JSON.stringify'); + } + +// Make a fake root object containing our value under the key of ''. +// Return the result of stringifying the value. + + return str('', {'': value}); + }; + } + + +// If the JSON object does not yet have a parse method, give it one. + + if (typeof JSON.parse !== 'function') { + JSON.parse = function (text, reviver) { + +// The parse method takes a text and an optional reviver function, and returns +// a JavaScript value if the text is a valid JSON text. + + var j; + + function walk(holder, key) { + +// The walk method is used to recursively walk the resulting structure so +// that modifications can be made. + + var k, v, value = holder[key]; + if (value && typeof value === 'object') { + for (k in value) { + if (Object.prototype.hasOwnProperty.call(value, k)) { + v = walk(value, k); + if (v !== undefined) { + value[k] = v; + } else { + delete value[k]; + } + } + } + } + return reviver.call(holder, key, value); + } + + +// Parsing happens in four stages. In the first stage, we replace certain +// Unicode characters with escape sequences. JavaScript handles many characters +// incorrectly, either silently deleting them, or treating them as line endings. + + text = String(text); + cx.lastIndex = 0; + if (cx.test(text)) { + text = text.replace(cx, function (a) { + return '\\u' + + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); + }); + } + +// In the second stage, we run the text against regular expressions that look +// for non-JSON patterns. 
We are especially concerned with '()' and 'new' +// because they can cause invocation, and '=' because it can cause mutation. +// But just to be safe, we want to reject all unexpected forms. + +// We split the second stage into 4 regexp operations in order to work around +// crippling inefficiencies in IE's and Safari's regexp engines. First we +// replace the JSON backslash pairs with '@' (a non-JSON character). Second, we +// replace all simple value tokens with ']' characters. Third, we delete all +// open brackets that follow a colon or comma or that begin the text. Finally, +// we look to see that the remaining characters are only whitespace or ']' or +// ',' or ':' or '{' or '}'. If that is so, then the text is safe for eval. + + if (/^[\],:{}\s]*$/ + .test(text.replace(/\\(?:["\\\/bfnrt]|u[0-9a-fA-F]{4})/g, '@') + .replace(/"[^"\\\n\r]*"|true|false|null|-?\d+(?:\.\d*)?(?:[eE][+\-]?\d+)?/g, ']') + .replace(/(?:^|:|,)(?:\s*\[)+/g, ''))) { + +// In the third stage we use the eval function to compile the text into a +// JavaScript structure. The '{' operator is subject to a syntactic ambiguity +// in JavaScript: it can begin a block or an object literal. We wrap the text +// in parens to eliminate the ambiguity. + + j = eval('(' + text + ')'); + +// In the optional fourth stage, we recursively walk the new structure, passing +// each name/value pair to a reviver function for possible transformation. + + return typeof reviver === 'function' + ? walk({'': j}, '') + : j; + } + +// If the text is not JSON parseable, then a SyntaxError is thrown. + + throw new SyntaxError('JSON.parse'); + }; + } +}()); diff --git a/public_html/visual_demo/news_domevent.py b/public_html/visual_demo/news_domevent.py new file mode 100755 index 0000000..d8b4d7c --- /dev/null +++ b/public_html/visual_demo/news_domevent.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python + +import zmq +import cgi, cgitb +import json +import sys +import time + +def main(): + cgitb.enable() + + zmqctx = zmq.Context() + zmqsock = zmqctx.socket(zmq.SUB) + zmqsock.setsockopt(zmq.HWM, 100) + #zmqsock.setsockopt(zmq.IDENTITY, 'web-dom-event') + zmqsock.setsockopt(zmq.SUBSCRIBE, '') + zmqsock.connect('tcp://maximus.ijs.si:1236') + + print "Content-Type: text/event-stream\n" + sys.stdout.flush() + + while True: + evt = zmqsock.recv_json() + print "event: news-event" + # print "id: " -- ce pade konekcija dobimo Last-Event-ID header s tem idjem. 
+ #print "data: {aid: %s, title: %s, tags: %s, geo: %s}" % (evt[0], evt[1], evt[2], evt[3]) + print "data: %s\n" % json.dumps(evt) + sys.stdout.flush() + + +if __name__ == '__main__': + main() diff --git a/public_html/visual_demo/zmq2jsEvent.py b/public_html/visual_demo/zmq2jsEvent.py new file mode 100755 index 0000000..a05e2f1 --- /dev/null +++ b/public_html/visual_demo/zmq2jsEvent.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python + +import zmq +import cgi, cgitb +import json +import sys +import time, datetime +import random + +def utc_time_str(t): + "yyyy-mm-dd hh:mm:ss string representing the UTC variant of a given datetime object" + return datetime.datetime.utcfromtimestamp(time.mktime(t.replace(microsecond=0).timetuple())).isoformat().replace('T',' ')+' (UTC)' + +def main(): + cgitb.enable() + + zmqctx = zmq.Context() + zmqsock = zmqctx.socket(zmq.SUB) + zmqsock.setsockopt(zmq.SUBSCRIBE, '') + zmqsock.connect('tcp://kopernik.ijs.si:13374') + + print "Cache-Control: no-cache" + print "Connection: Keep-Alive" + print "Content-Type: text/event-stream" + print "\n" + sys.stdout.flush() + + t0 = time.time() + while time.time()-t0<10: + # get data + article = zmqsock.recv_pyobj() + + # ignore articles from non-public feeds + if 'public' not in article.get('acl_tagset', []): + continue + + # hackish: ignore outdated articles + age_days = (datetime.datetime.now() - (article['publish_date'] or article['found_date']).replace(tzinfo=None)).days + if age_days > 7: + continue + + # compute some pretty strings + txt = article['cleartext'] + txt = '

    '.join(txt.splitlines()) + gap_idx = min(txt.find(' ', 400), 500) + if gap_idx == -1: gap_idx = 500 + if len(txt) > gap_idx: + intro = txt[:gap_idx] + ' (...)' + else: + intro = txt + + # create a "javascript" event + print "id:", article['id'] + print "data: %s\n" % json.dumps({ + 'aid': article['id'], + 'url': article['url'], + 'feed_url': article['feed_url'], + 'title': article['title'], + 'intro': intro, + 'date': utc_time_str(article['publish_date'] or article['found_date']), + 'date_is_approx': article['publish_date'] is None, + 'geo': article['geo'], # or ('%f %f' % (360*random.random()-180, 360*random.random()-180)) # random part: debug only + 'pub_geo': article['source_geo'], + 'img': article['img'], + }) + sys.stdout.flush() + + +if __name__ == '__main__': + main() diff --git a/realtime_cleaner.py b/realtime_cleaner.py new file mode 100755 index 0000000..45cdb2b --- /dev/null +++ b/realtime_cleaner.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +import zmq +import time +from cleanDb import * +import signal, termios, fcntl + +def main(): + db, cur = openConnection('realtime cleaner') + db.autocommit = True # we don't need transactions + + zmqctx = zmq.Context() + zmqsock = zmqctx.socket(zmq.PULL) + zmqsock.setsockopt(zmq.HWM, 20) + zmqsock.connect('tcp://*:1234') + + info('loading langdet_nb.pck ...') + D = langdet.load_langdet_db('langdet/langid_nb.pck') + info('... done') + + while True: + id, headers, page = zmqsock.recv_multipart() + articleId = int(id) + + print 'got article', id + try: + #print "starting decode" + utf8 = handleSingleDecode(cur, articleId, headers, txt=page) + cleartext = handleSingleCleartext(cur, articleId, html=str(utf8), commit=False) + lc_iso, lc_alt = handleSingleLangdet_cld(cur, articleId, text=cleartext, commit=False) + if lc_iso is None: + used_blazn = True + lc_iso, lc_alt = handleSingleLangdet(cur, D, articleId, text=cleartext, ignore_cld_langs=True, commit=False) + else: + used_blazn = False + cur.connection.commit() # make cleartext and language known to the outside world at the same time + print " %5d bytes %s %s %s" % ( + len(cleartext), lc_iso, ('nonCLD' if used_blazn==True else ' '), cleartext[:_TERMINAL_WIDTH-30].encode('utf8', 'replace').replace('\n',' ')) + except NoTextFoundError: + print " (empty)" + except ProcessingError: + print " ProcessingError:" + print '\n'.join(' '+line for line in traceback.format_exc().splitlines()) + except: + print "some exception" + print traceback.format_exc() + pass + + try: + cur.execute("NOTIFY have_cleartext, '%s'", (articleId,)) # !bn: replace with "SELECT pg_notify('have_cleartext',id);" in a trigger + except psycopg2.InterfaceError, e: + print 'DB exception. Traceback follows. Sleeping for a minute, then reconnecting.' + traceback.print_exc() + time.sleep(60) + try: db.close() + except: pass + try: db, cur = openConnection('realtime cleaner') + except: pass + +def handle_resize(signum, frame): + "Update global variable _TERMINAL_WIDTH on SIGWINCH" + global _TERMINAL_WIDTH + try: + h, w = map(int, os.popen('stty size', 'r').read().split()) + except: + h, w = 25, 80 + _TERMINAL_WIDTH = w + +if __name__ == '__main__': + #signal.signal(signal.SIGWINCH, handle_resize) # zmq doesn't like it + handle_resize(None, None) + main() diff --git a/runtime_article_sucker_gls.py b/runtime_article_sucker_gls.py new file mode 100755 index 0000000..3a58e70 --- /dev/null +++ b/runtime_article_sucker_gls.py @@ -0,0 +1,423 @@ +#!/usr/bin/python + +# +# !!! 
TODO: ce je final_url hit, oznac feed_article za removal pa tisina. +# + + + +# +# sucker za downloadanje articlov +# logika je simpl: +# - najd request +# - poskus downloadat [pazi: referer, sledenje redirectom] +# - ce ne rata logiri in pomoznosti reenqueuei al pa oznac za problematicnga +# - ce rata, posti article in odstran request iz urlpoola +# + +# +# implementacija: +# - glavn thread nabira requeste in jih fila v queue +# - worker threadi dequeuajo po en request in ga sprocesirajo +# +# glavn thread more vedt kere requeste je ze obdelu, zato si jih na zacetku vse odklene, pol pa postopoma zaklepa; da ne enqueuea enga veckrat .. +# besides: post articla bi meu lahko check ce je vec k en article na feed_article - skode pa ni velke razn loada ... +# + +# +# annoyance: Queue ne vzame Eventa, tko da bo wait na queue vsake par sekund timeoutu +# + +# +# problematicn: ko da feed nove clanke, selectam enga, pol pa takoj poskusm naslednga - da bi zapovnu Q - pa ne gre, zato spim 60s. +# annoying..... +# +# !!! to se da popravt (a je res?) tko da ce ne dobis nazaj tok vrstic k si jih zahtevu, pocakas 20 sekund namest da gres takoj spet najedat +# hopefully bo v tem casu ze zdownloadu, je pa 3x mn k TIMEOUT... +# + +# +# gls verzija: +# main thread (try_enqueue funkcija) selecta VSE +# enqueued feed_article z +# id > last_selected_id (da preskocmo stvari k jih ze mamo) +# kjer site ni disabled IN ni locked (locked naceloma ne bo noben, admin disablani pa so lahko) +# in feed_article ni locked IN je enqueued IN next_attempt je > now() (enq morjo bit, locked naceloma ne bodo, next_att je pa sock timeout handling ipd) +# (selecta se tud feed url, ker se nastav za Referer HTTP field) +# in jih submita suckerjem. +# +# suckerji : run:process_request:do_request:DB_post_article updatajo feed_article na locked=false, enqueued=false +# + + +from common import * +import threading, Queue, socket +import urllib2 +import cookielib +from traceback import * +import base64 +import time +import gc +import sys +import zmq +import StringIO, gzip +import random +import datetime +import heapq +import collections +import pdb + +SOCKET_TIMEOUT=30 +DATABASE_TIMEOUT=5 +DATABASE_SHORT_TIMEOUT=5 +N_THREADS=37 +THREAD_Q_TIMEOUT=60 # was 5 +QUERY_MULT=113 +MAX_DB_FETCH_LATENCY=2*60 +MAX_DB_FULL_FETCH=6*3600 +CLEANER_Q_SIZE=1000 +RATELIMIT_TIMEOUT=4.0 + +def partition_work_by_site(L): + W = collections.defaultdict(dict) + for e in L: W[e['siteid']][e['id']] = e + return W + +def exc_str(exc): + try: + return unicode(exc).encode('utf8','replace') + except: + try: + t = repr(exc) + except: + try: exc_type = repr(type(exc)) + except: exc_type = '(unknown type)' + t = '' % (exc_type, getattr(exc,'args','(no args)')) + if type(t)==unicode: t = t.encode('utf8','replace') + return t + +class RLQueue(object): + # v queueju (heap) so tupli (next_suck, siteid, dict(fa.id:request, ...)) + # s setom dobimo avtomaticno deduplikacijo (otoh vrstn red ni po fa.id ASC) + def __init__(self): + self.Q = [] + def __len__(self): + return len(self.Q) + def extend(self, L): + W = partition_work_by_site(L) + when = time.time() + RATELIMIT_TIMEOUT + if len(self.Q) == 0: + self.Q = [(when, siteid, W[siteid]) for siteid in W] + heapq.heapify(self.Q) + else: + active_sites = {x[1]:x[2] for x in self.Q} + for siteid in W: + if siteid in active_sites: + active_sites[siteid].update(W[siteid]) + else: + heapq.heappush(self.Q, (when, siteid, W[siteid])) + + def pop(self): + if len(self.Q) == 0: + pdb.set_trace() + raise IndexError + when, siteid, 
worklist = self.Q[0] + if when > time.time(): time.sleep(max(0,when - time.time())) + + work = worklist.popitem() + if len(worklist) == 0: heapq.heappop(self.Q) + else: heapq.heapreplace(self.Q, (time.time() + RATELIMIT_TIMEOUT, siteid, worklist)) + + return work[1] # discard "art_id:" part + +class sucker(threading.Thread): + numthreads = 0 + + def __init__(self, worklist, killcmd, db, R): + #self.db = db + # pyPgSQL vsaj ne trd da je threadsafety=2... + self.db = DB_connect('article sucker: suck') + self.worklist = worklist + self.retQ = R + self.killcmd = killcmd + self.thread_id = sucker.numthreads + sucker.numthreads += 1 + socket.setdefaulttimeout(SOCKET_TIMEOUT) + threading.Thread.__init__(self) + self.setDaemon(True) + + def run(self): + print "thread %d ready to work" % self.thread_id + while not self.killcmd.isSet(): + self.process_request() + + def process_request(self): + work = None + try: + work = self.worklist.get(block=True, timeout=THREAD_Q_TIMEOUT) + except (Queue.Empty): + print "> [%d] no work received in %d seconds" % (self.thread_id, THREAD_Q_TIMEOUT) + return + else: + print ("working on request %s" % work[1]).encode('ascii', 'replace') + self.do_request(work) + if random.random() < 0.01: + print "> [%d] GC" % (self.thread_id) + gc.collect() + + def do_request(self, work): + (ref_url, rq_url, art_id, art_feedid, art_feedsiteid, art_siteid, max_fau_seq) = work + + blacklisted_extensions = ['asx', 'dts', 'gxf', 'm2v', 'm3u', 'm4v', 'mpeg1', 'mpeg2', 'mts', 'mxf', 'ogm', 'pls', 'bup', 'a52', 'aac', 'b4s', 'cue', 'divx', 'dv', 'flv', 'm1v', 'm2ts', 'mkv', 'mov', 'mpeg4', 'oma', 'spx', 'ts', 'vlc', 'm4v', 'mp4', 'mp3', 'zip'] + + rq_url_lc = rq_url.lower() + if any(rq_url_lc.endswith(x) for x in blacklisted_extensions): + # let's not be even remotely interested in this url. 
+ print "[%d] > ignoring %s" % (self.thread_id, rq_url) + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, None, max_fau_seq,903, '', '') + DB_log(self.db, art_siteid, rq_url, 903, -1) + print "[%d] > unlocking %d" % (self.thread_id, art_siteid) + DB_site_access_unlock(self.db, art_siteid) + return + + try: + print "> open" + rq = urllib2.Request(url=rq_url) + rq.add_header('User-Agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.8.0.6) Gecko/20060728 Firefox/1.5.0.6') + rq.add_header('Accept', 'text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,*/*;q=0.5') + rq.add_header('Accept-Language', 'en-us,en;q=0.5') + rq.add_header('Accept-Charset', 'ISO-8859-1,utf-8;q=0.7,*;q=0.7') + if type(ref_url)==unicode: ref_url = ref_url.encode('utf8','replace') + rq.add_header('Referer', ref_url) + http_opener = urllib2.build_opener(urllib2.HTTPRedirectHandler) + print "[%d] > request set up" % self.thread_id + hnd = http_opener.open(rq) + print "[%d] > opened" % self.thread_id + # sync empty cookie jar + except urllib2.HTTPError, exc: + print "%d > excpt: httperror" % self.thread_id + #err_page = exc.read() + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, None, max_fau_seq,918, '', exc_str(exc))#base64.b64encode(err_page)) + DB_log(self.db, art_siteid, rq_url, 918, exc.code) + print "[%d] [%d] %s" % (self.thread_id, exc.code, rq_url) + except urllib2.URLError, e: + print "%d > excpt: urlerror" % self.thread_id + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, None, max_fau_seq,901, '', exc_str(e)) + DB_log(self.db, art_siteid, rq_url, 901, -1) + print "[%d] %s" % (self.thread_id, rq_url) + except socket.timeout, e: + DB_retry_article(self.db, art_id) + print "[%d] timeout; url set for retry" % self.thread_id + print format_exc() + except (socket.gaierror, socket.herror, socket.error), e: + DB_retry_article(self.db, art_id) + print "[%d] socket error?" % self.thread_id + print format_exc() + except Exception, e: + # ne mormo vsega pohandlat .... + print "[%d] > excpt" % self.thread_id + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, None, max_fau_seq, 900, '', exc_str(e)) + DB_log(self.db, art_siteid, rq_url, 900, -1) + print ("[%d] %s" % (self.thread_id, rq_url)).encode('ascii', 'replace') + else: + print "[%d] > ok" % self.thread_id + + try: + page = hnd.read() + except: + DB_retry_article(self.db, art_id) + print "[%d] caught exc, url set for retry ----" % self.thread_id + else: + code = hnd.code + headers = str(hnd.headers) + final_url = hnd.url + size = len(page) + + if size > 2000000: + print "[%d] downloaded a suspiciously large file [aid = %d, len = %d]; discarding." % (self.thread_id, art_id, size) + DB_log(self.db, art_siteid, rq_url, 904, -size) + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, final_url, max_fau_seq, 904, headers, '') + return + + if code == None: + print "[%d] someone is screwing with us. discard. [aid = %d, len = %d]; discarding." % (self.thread_id, art_id, size) + DB_log(self.db, art_siteid, rq_url, 905, -size) + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, final_url, max_fau_seq, 905, headers, '') + return + + if hnd.headers.get('content-encoding') == 'gzip': + print "[%d] decompressing..." 
% self.thread_id + try: + contentIO = StringIO.StringIO(page) + gzipFile = gzip.GzipFile(fileobj=contentIO) + page = gzipFile.read() + except Exception, e: + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, None, max_fau_seq, 917, headers, exc_str(e)) + DB_log(self.db, art_siteid, rq_url, 917, -1) + print "decompression failed." + DB_site_access_unlock(self.db, art_siteid) + return + + + print "%d > post" % self.thread_id + + # + # catch: page encoding is not known, so we need to treat it like a bytestream until it gets parsed by BeautifulSoup + # but database expects an utf8 string. utf8 knows invalid byte sequences + # (alternative: SQL_ASCII: noninterpreted bytestream; don't want to insert that into db.connection) + # therefore, all pages (including already-utf8...) need to be encoded (...again) + + # log pred post - ce slucajno poginemo zarad ctl-c -> daemon, hocmo vsaj log, ne clanka... + + DB_log(self.db, art_siteid, rq_url, code, size) + DB_post_article(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, rq_url, final_url, max_fau_seq, code, headers, page) + self.retQ.put((art_id, headers, page), block=True) # prevent potential infinite retQ growth if dead cleaners + print "[%d] > log" % self.thread_id + print "[%d] [%d] %s" % (self.thread_id, code, rq_url) + + # print "[%d] cleanup" % (self.thread_id) + # decoded = decoder.decodeText(page, hnd.headers) + # cleaned = cleaner.parseCleartext(decoded, final_url) + # DB_post_cleaned(self.db, art_id, art_feedid, art_feedsiteid, art_siteid, cleaned) + # print "[%d] done cleaning" % (self.thread_id) + + print "[%d] > unlocking %d" % (self.thread_id, art_siteid) + DB_site_access_unlock(self.db, art_siteid) + +def cleaner_sink(Q): + db = DB_connect('article sucker: sink') + + zmqctx = zmq.Context() + sock = zmqctx.socket(zmq.PUSH) + sock.setsockopt(zmq.HWM, 100) + sock.bind('tcp://*:1234') + + while True: + id, headers, page = Q.get() + print "sending", id, "to the cleaners" + try: + sock.send_multipart([str(id),headers,page])#, flags=zmq.NOBLOCK) + except: + print "%% send_multipart failed." + DB_unlock_cleaning(db, id) + +FIND_ALL_REQS = """ + SELECT + f.URL AS ref_url, argmin(fau.seq, fau.url) as URL, a.id, a.feedid, a.feedsiteid, a.siteid, max(fau.seq) as max_fau_seq + FROM feed_article AS a + INNER JOIN feed AS f ON a.feedid = f.id + INNER JOIN site ON a.siteid = site.id + INNER JOIN feed_article_urls AS fau ON a.id = fau.fa_id + WHERE + NOT site.disabled + AND NOT site.locked + AND a.enqueued + AND a.next_attempt < NOW() + AND a.id > %s + GROUP BY + ref_url, a.id, a.feedid, a.feedsiteid, a.siteid +""" +#""" +# SELECT +# f.URL AS ref_url, a.URL, a.id, a.feedid, a.feedsiteid, a.siteid +# FROM feed_article AS a +# INNER JOIN feed AS f ON a.feedid = f.id +# INNER JOIN site ON a.siteid = site.id +# WHERE +# NOT site.disabled +# AND NOT site.locked +# AND a.enqueued +# AND a.next_attempt < NOW() +# AND a.id > %s +#""" + + +last_full_select = time.time() +last_fetch_ts = time.time() +last_req_id = -1 + +def try_enqueue(db, T, Q): + """ + called once for one enqueued feed_article to be transfered from private worklist (T) to sucker queue (Q) + if T is empty or last_fetch_ts+MAX_DB_FETCH_LATENCY < now(), fetch everything new (i.e. with id > last_req_id) + from the database and add it to T. if T is still empty, sleep for a while + sleep timeout depends on Q being empty (or not.) + only enqueue articles on sites the were idle for 3s+. 
+ + !bn: BUG: if article is rescheduled for download, it will return to the db and stay there until + crawler is restarted .. because of last_req_id. fix later. much later. + """ + global last_full_select, last_fetch_ts, last_req_id + + print "O_o ... try to add one feed_article to sucker queue (T.len=%d, Q.len=%d)" % (len(T), Q.qsize()) + + if len(T) == 0 or (last_fetch_ts + MAX_DB_FETCH_LATENCY) < time.time(): + print " .. worklist is empty, nagging the database." + cur = db.cursor() + + fetch_min_req_id = last_req_id + if (last_full_select + MAX_DB_FULL_FETCH) < time.time(): + fetch_min_req_id = -1 + last_full_select = time.time() + + cur.execute(FIND_ALL_REQS, (fetch_min_req_id,)) + R = cur.fetchall() + db.commit() + T.extend(R) + last_req_id = max(last_req_id, max(x['id'] for x in R) if len(R) > 0 else -1) + last_fetch_ts = time.time() + print " .. after db fetch .. T.len = %d" % len(T) + + if len(T) == 0: + " .. still no work in db. sleep for a while. ZZZZzzz." + if Q.qsize() > 0: # neki je se v queueju, selectat pa ne mormo nic - mogoce so zaklenjeni sajti ? + time.sleep(DATABASE_SHORT_TIMEOUT) + else: + time.sleep(DATABASE_TIMEOUT) # there is obviously nothing in the database, wait a bit and return + return + + print "O_O really try to enqueue work (T.len=%d, Q.len=%d prior to Q.put(T.pop()))" % (len(T), Q.qsize()) + rq = T.pop() # at least try to avoid hitting the same site multiple sequentially + Q.put(rq, block=True) + +def main(): + threads = [] + Q = Queue.Queue(QUERY_MULT*N_THREADS) # fetcher->sucker(s) + R = Queue.Queue(CLEANER_Q_SIZE) # sucker(s)->cleaner_sink + + socket.setdefaulttimeout(SOCKET_TIMEOUT) + db = DB_connect('article sucker: discovery') + + if not(len(sys.argv) > 1 and sys.argv[1] == "skip-unlock"): + print "Unlocking sites & feedarticles" + DB_unlock_sites(db) + DB_unlock_feedarticles(db) + + sink = threading.Thread(target=cleaner_sink, args=(R,)) + sink.daemon = True + sink.start() + + for i in range(N_THREADS): + evt = threading.Event() + thr = sucker(Q, evt, db, R) + threads.append((thr,evt)) + thr.start() + try: + T = RLQueue() # private request queue + while True: + try_enqueue(db, T, Q) + except (KeyboardInterrupt): + # merge down + for thr,evt in threads: + evt.set() + for thr,evt in threads: + thr.join() + except: + for thr,evt in threads: + evt.set() + for thr,evt in threads: + thr.join() + print format_exc() + +if __name__ == '__main__': + main() diff --git a/runtime_feed_sucker_gx.py b/runtime_feed_sucker_gx.py new file mode 100755 index 0000000..762e6d6 --- /dev/null +++ b/runtime_feed_sucker_gx.py @@ -0,0 +1,265 @@ +#!/usr/bin/env python +#-*- indent-tabs-mode: nil -*- + +""" + async version of feed sucker, gevent version, now with concurrent db fetch! + + while true: + every once in a while, submit db->feed_queue thread; make sure to remove duplicates + (select one most eager feed for every site) + submit work to greenlet.Pool + store results + +""" + +import gevent, gevent.monkey, gevent.pool, gevent.queue +import gevent_psycopg2 +gevent.monkey.patch_all() +#gevent.monkey.patch_socket() +gevent_psycopg2.monkey_patch() +import common +import feedparser +import socket +import time, datetime, pytz +import sys +import traceback +import pdb +import re +import libxml2 +import random + +SOCK_TIMEOUT = 10 # no point in waiting ... 
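# Sketch of what FEED_GET_QUERY (defined below) computes, kept here as a descriptive
# comment: the window function keeps only the single most "eager" feed per site, i.e.
# the row with the smallest next_scan inside each siteid partition. A rough pure-Python
# equivalent, assuming a hypothetical `rows` list of dicts with 'siteid' and 'next_scan'
# keys, would be:
#
#     most_eager = {}
#     for row in rows:
#         best = most_eager.get(row['siteid'])
#         if best is None or row['next_scan'] < best['next_scan']:
#             most_eager[row['siteid']] = row
#     rows = list(most_eager.values())
#
# Doing it in SQL keeps the filtering on the database side instead of shipping every
# pending feed row to the client.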
+N_THREADS = 257 +IDLE_WAIT = 100 +FEED_GET_INTERVAL = 60 +FEED_GET_QUERY = "SELECT * FROM (SELECT *, row_number() OVER (PARTITION BY siteid ORDER BY next_scan) FROM feed WHERE NOT disabled AND next_scan < now()) subq WHERE row_number < 2" + +#fff = open('feed_er.log','w') + +# prevent excessive load times of external entity defs from w3.org +def dont_load_external(URL, ID, context): + print '**** tried to load', URL +# fff.write('%s %s\n' % (str(time.time()), URL)) +# fff.flush() + return '' +#libxml2.setEntityLoader(dont_load_external) + +def suck_feed(feed, Q): + try: + print "[%8d] starting suck <%s>" % (feed['id'], feed['url']) + # !bn: TODO: kaj se zgodi ce dobimo etag-match? upam da ne kill feed? + # -- result.status == 304, entries = [], etag=NULL ! + et = str(feed['last_etag']) if 'last_etag' in feed else '' + data = feedparser.parse(feed['url'], etag=et) + print "[%8d] parsed" % (feed['id'],) + Q.put((feed, data, None)) + except: + Q.put((feed, None, traceback.format_exc())) + #print 'end suck feed' + +fuckups = 0 + +# feed.pruning_mode: +# NULL = normal: don't +# '0' = normal: don't, determined by learning +# 'L' = learning mode -- discover rule +# 'D' = input dataset is sorted by time, descending, with no missing pubdates +# 'A' = "" ascending +# 'X' = no rule found +# 'Y' = rule verification failed + +stats_sum = (0,)*5 + +def process_result(db, feed, result, exception): + global fuckups + global stats_sum + + print "[%8d] processing result. [%s] <%s>" % (feed['id'], feed['pruning_mode'], feed['url'].encode('ascii', 'replace')) + try: + if exception: # something awful happened + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 979, exception) + common.DB_disable_feed(db, feed['id'], now=True, flag=40) + elif not result: # something possibly even more awful might have happened + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 978, '') + common.DB_disable_feed(db, feed['id'], now=True, flag=41) + #pdb.set_trace() + elif 'status' not in result: # how did that happen ? O.o + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 977, '') + common.DB_disable_feed(db, feed['id'], now=True, flag=42) + #pdb.set_trace() + elif 'feed' not in result: # how did that happen ? O.o!! + print 'FFFUUUUU!' + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 967, '') + common.DB_disable_feed(db, feed['id'], now=True, flag=46) + #pdb.set_trace() + elif result['status'] >= 400: # feed gone + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 976, result['status']) + common.DB_disable_feed(db, feed['id'], now=True, flag=43) + #pdb.set_trace() + elif 'entries' not in result: + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 975, result['status']) + common.DB_disable_feed(db, feed['id'], now=True, flag=44) + #pdb.set_trace() + else: + # completely skip 304 responses; DO NOT update etag in DB: some 304 responses don't echo etag + if not result['status'] == 304: + + # implement entry submission pruning here. + # feed is updated 3x in this block. fix me maybe? + # + # poskusmo najdt za zacetk samo time-ordered-descending (D) + # ce so prisotni VSI pubdateji, in ce so VSI urejeni >= + # + # ce pise v dbju 'L', vpisemo v DB 'D' + # ce pise v dbju 'D', insertamo samo tiste entryje k so >= od zadnga timestampa + + # wtf: povsod uporabljamo pytz.UTC; v db se inserta ___+02. pravilno povecan. + # psycopg2 screwy? 
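# Descriptive note on the sortedness check below: gt(x, y) tests x >= y, so
# issorted(stamped_entries) is True when the timestamps are non-increasing, i.e. the
# feed lists its newest entries first ('D', descending); issorted(reversed(...)) covers
# the ascending case ('A'). Entries without 'updated_parsed', or stamped in the future,
# set foofeed and make the feed unusable for pruning.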
+ + gt = lambda x,y: x>=y + issorted = lambda u: all(map(gt, u[:-1], u[1:])) + mktime = lambda e: common.conv_time(e, datetime.datetime(1,1,1,tzinfo=pytz.UTC)) + + foofeed = any('updated_parsed' not in e for e in result.entries) + stamped_entries = [mktime(e) for e in result.entries] + if any(e > datetime.datetime.now(tz=pytz.UTC) for e in stamped_entries): foofeed = True + latest_entry_ts = max(stamped_entries) if len(stamped_entries) > 0 else datetime.datetime(1,1,1,tzinfo=pytz.UTC) + + # learning + if feed['pruning_mode'] == 'L': + cur = db.cursor() + if foofeed: + print '[%8d] +++ setting feed to pruning: DISABLED.' % (feed['id'],) + cur.execute("UPDATE feed SET pruning_mode='0' WHERE id=%s", (feed['id'],)) + elif issorted(stamped_entries): + print '[%8d] +++ setting feed to pruning: time sorted descending.' % (feed['id'],) + cur.execute("UPDATE feed SET pruning_mode='D' WHERE id=%s", (feed['id'],)) + elif issorted(list(reversed(stamped_entries))): + print '[%8d] +++ setting feed to pruning: time sorted ascending.' % (feed['id'],) + cur.execute("UPDATE feed SET pruning_mode='A' WHERE id=%s", (feed['id'],)) + else: + cur.execute("UPDATE feed SET pruning_mode='X' WHERE id=%s", (feed['id'],)) + db.commit() + + # verification + prune = '0' + if feed['pruning_mode'] == 'D': + if issorted(stamped_entries) and not foofeed: prune = 'D' + else: + cur = db.cursor() + cur.execute("UPDATE feed SET pruning_mode=%s WHERE id=%s", ('Y' if not foofeed else 'F', feed['id'],)) + db.commit() + elif feed['pruning_mode'] == 'A': + if issorted(list(reversed(stamped_entries))) and not foofeed: prune = 'A' + else: + cur = db.cursor() + cur.execute("UPDATE feed SET pruning_mode=%s WHERE id=%s", ('Y' if not foofeed else 'F', feed['id'],)) + db.commit() + + R = [common.post_entry(db, feed, entry, acl=None, cutoff_ts=feed['pruning_ts_last'] if prune in ('A', 'D') else None) for entry in result.entries] + + stats = map(sum,zip((0,0,0,0,0), *R)) + stats_sum = map(lambda x,y:x+y, stats_sum, stats) + n_new = stats[3] + common.DB_update_feed_stat(db, feed, len(result.entries), n_new) # feed(n_i, n_e, total, total_new) + common.DB_update_feed(db, feed, result, latest_entry_ts) # feed_ps, feed(etag, failures), feed(pruning_ts_last) + + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], result['status'], n_e=len(result.entries), n_i=n_new, unchanged=feed['unchanged_iter']) + + if result.bozo and result.bozo==1 and result.bozo_exception: + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 973, str(result.bozo_exception)) + + print "[%8d] suck complete with (%d/%d/%d/%d/%d/%d) -> (%d/%d/%d/%d/%d)" % ((feed['id'],) + tuple(stats) + (len(result.entries),) + tuple(stats_sum)) + else: + print '[%8d] feed unchanged since last scan' % (feed['id'],) + + common.DB_update_feed_scan(db, feed) # nextscan = now + ttl*rnd + except: +# sys.exit(-1) + raise + common.DB_log_feed_suck(db, feed['id'], feed['siteid'], 974, traceback.format_exc()) + common.DB_disable_feed(db, feed['id'], now=True, flag=45) + #pdb.set_trace() + db.commit() + +def process_results(db, Q): + c = 0 + for result in iter(Q.get, StopIteration): + c += 1 + process_result(db, *result) + return c + +def fetch_potential_feeds(db, work): + print "fetching feed list." 
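    # Descriptive note: `work` is the shared [feed_list, last_fetch_timestamp] pair
    # owned by main(); it is mutated in place so the main loop sees newly fetched feeds
    # without any return-value plumbing between greenlets.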
+ + cur = db.cursor() + cur.execute(FEED_GET_QUERY) + feeds = cur.fetchall() + db.commit() + + print "submitting new work" + + old_fids = { feed['id'] for feed in work[0] } + new_fids = { feed['id'] for feed in feeds } + + new_useful_feeds = new_fids - old_fids + work[0].extend(feed for feed in feeds if feed['id'] in new_useful_feeds) + + random.shuffle(work[0]) # randomize scan order + + work[1] = time.time() + print "db fetch done" + + +def main(): + socket.setdefaulttimeout(SOCK_TIMEOUT) + db = common.DB_connect('feed sucker [gx]') + dbd = common.DB_connect('feed sucker [gx:discovery]') + cur = db.cursor() + common.DB_prepare(db, {'feedsuck'}) +# cur.execute('UPDATE feed SET next_scan = now() + (effective_ttl * random())::integer::reltime') + db.commit() + + work = [[], 0] # feed list, last fetch + + Q = gevent.queue.Queue(maxsize=0) # functions as a channel: put blocks until get + work_pool = gevent.pool.Pool(size=N_THREADS) + workers = [] + + dbw = gevent.spawn(process_results, db, Q) + + while True: + if (work[1] + FEED_GET_INTERVAL) < time.time(): + # submit db fetch work + work[1] = time.time() # don't spawn it next time around the loop.. + workers.append(work_pool.spawn(fetch_potential_feeds, dbd, work)) + else: + # submit some normal work + if len(work[0]) == 0: + time.sleep(5) + continue + else: + print "submitting work; queue size = %d, active workers = %d" % (len(work[0]),len(workers)) + workers.append(work_pool.spawn(suck_feed, work[0].pop(), Q)) + + # housekeeping + active_workers = [] + for worker in workers: + if worker.ready(): + worker.join() + else: + active_workers.append(worker) + workers = active_workers + + # catch Ctl-C, put StopIteration, etc.. + +# greenlets = [work_pool.spawn(suck_feed, feed, Q) for feed in feeds] +# gevent.joinall(greenlets) +# Q.put(StopIteration) +# dbw.join() + + +if __name__ == '__main__': + #import cProfile + #cProfile.run('main()') + main() diff --git a/serialize.py b/serialize.py new file mode 100644 index 0000000..c7e34b5 --- /dev/null +++ b/serialize.py @@ -0,0 +1,88 @@ +""" +Serialization (XML only for now) of newsfeed articles. +""" + +import re +import jinja2 +import util + +def xml_str(val, key=None): + "Unicode, xml-safe variant of `val`. `key` is for debug only." + if val is None: + return '' + elif type(val) in (str,unicode): + val = util.xmlEscape(val) + if type(val) == str: val = val.decode('utf8','replace') + elif type(val) in (list,tuple,set): + val = type(val)(xml_str(x,key) for x in val) + elif type(val) in (long, int, float): + val = str(val) + else: raise ValueError, "Can't handle type %r for key %r" % (type(val), key,) + return val + +def remove_xml_header(xml): + if xml.startswith('' in xml: + return xml[xml.find('>') + 1:] + return xml + +def mark_paras(txt): + """ + Given plain text where each line is a separate paragraph, returns + a safe XML (= escaped content) with

    <p>-marked paras. + The safeness is implicit; for use in a jinja2 template, use |safe. + """ + if not txt: return '' + return '\n'.join('<p>%s</p>
    ' % util.xmlEscape(line.strip()) for line in txt.splitlines() if line.strip()) + +env = jinja2.Environment() +env.finalize = lambda x: '' if x is None else x +env.filters['x'] = xml_str # `e` is the built-in HTML-escaping filter; we use a stricter one. Overriding e and using autoescaping does not work :/ +env.filters['remove_xml_header'] = remove_xml_header +env.filters['iso_utc'] = lambda t: util.iso_utc_time(t) if t else '' +env.filters['mark_paras'] = mark_paras + +FORMAT_VERSION = '4.0' +TEMPLATE = env.from_string(''' +{% macro geo_xml(geo) %} + + {% if geo.lat and geo.lon %}{{ geo.lat }}{{ geo.lon }}{% endif %} + {% if geo.city %}{{ geo.city|x }}{% endif %} + {% if geo.country %}{{ geo.country|x }}{% endif %} + +{% endmacro %} + +
    + + {% if source_name %}{{ source_name|x }}{% endif %} + {{ source_hostname|x }} + {% if source_geo %}{{ geo_xml(source_geo) }}{% endif %} + {% if source_tags %}{% for tag in source_tags %}{{ tag|x }}{% endfor %}{% endif %} + + + {{ feed_title|x }} + {{ feed_url|x }} + + {{ url|x }} + {% if publish_date %}{{ publish_date|iso_utc|x }}{% endif %} + {{ retrieved_date|iso_utc|x }} + {{ lang|x }} + {% if google_story_id %}{{ google_story_id|x }}{% endif %} + {% if bloomberg_score %}{{ bloomberg_score|x }}{% endif %} + {% if geo %}{% for g in geo %}{{ geo_xml(g) }}{% endfor %}{% endif %} + {% if tags %}{% for tag in tags %}{{ tag|x }}{% endfor %}{% endif %} + {% if img %}{{ img|x }}{% endif %} + {{ title|x }} + {{ cleartext|mark_paras|safe }} + {% if rych %}{{ rych|remove_xml_header|safe }}{% endif %} + {% if xrych %}{{ xrych|remove_xml_header|safe }}{% endif %} +
    +''') + +######## + +def xml_encode(article): + "XML encoding of an article" + xml = TEMPLATE.render(article).encode('utf8','replace') + return '\n'.join(line for line in xml.splitlines() if line.strip()) + + diff --git a/start_pipeline.py b/start_pipeline.py new file mode 100755 index 0000000..d39a500 --- /dev/null +++ b/start_pipeline.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python + +""" +BROKEN. +""" + +from subprocess import call + +pipeline = [ + 'db2zmq_cleartext.py', + 'zmq2zmq_enrych.py', + 'zmq2zmq_xenrych.py', + 'zmq2zmq_bloombergness.py', + 'zmq2http_all.py', +] +ports = range(13371, 13380) +assert len(pipeline) <= len(ports)+1 + +for cmd, port_in, port_out in zip(pipeline, [None]+ports, ports+[None])[:0]: + with open('/tmp/pipeline_part','w') as f: + f.write('echo -- %s --port-in=%s --port-out=%s' % (cmd, port_in, port_out)) + call(['tmux', 'split-window', '''bash --rcfile /tmp/pipeline_part)''']) +call(['tmux', 'split-window', 'bash --rcfile <(echo "cd ..; ./realtime_cleaner.py")']) +call(['tmux', 'select-layout', 'tiled']) +#call(['tmux', 'new-window']) + + diff --git a/util.py b/util.py new file mode 100644 index 0000000..9a91b4a --- /dev/null +++ b/util.py @@ -0,0 +1,446 @@ +import os, sys +import re, htmlentitydefs, string +import urllib2 +import time, datetime +import random +import gzip, StringIO +import inspect, traceback + +import socket +socket.setdefaulttimeout(5) #in seconds + + +def htmlUnescape(text): + """ + Taken from http://effbot.org/zone/re-sub.htm + Removes HTML or XML character references and entities from a text string. + + @param text The HTML (or XML) source text. + @return The plain text, as a Unicode string, if necessary. + """ + def fixup(m): + text = m.group(0) + if text[:2] == "&#": + # character reference + try: + if text[:3] == "&#x": + return unichr(int(text[3:-1], 16)) + else: + return unichr(int(text[2:-1])) + except ValueError: + pass + else: + # named entity + try: + text = unichr(htmlentitydefs.name2codepoint[text[1:-1]]) + except KeyError: + pass + return text # leave as is + return re.sub("&#?\w+;", fixup, text) + +def textifyHtml(html): + """ + Convert HTML `html` to text in a simple way: block-level elements + get surrounded by whitespace, other whitespace disappears, + all HTML tags are stripped. + """ + if html is None: return None # just in case + txt = html + txt = re.sub(']*>', '\n', txt) + txt = re.sub("<.*?>", "", txt) + txt = normalizePunctuation(txt, normalizeWhitespace=True) + txt = '\n'.join(line.strip() for line in txt.splitlines()) + txt = re.sub(" +"," ", txt) + txt = re.sub("\n+","\n", txt) + return txt + +def xmlEscape(txt, errors='ignore'): + """ + Replace "weird" chars with their XML entities. + For characters not allowed by XML (e.g. chr(0), chr(7), ...), raise ValueError + if `errors` is not set to "ignore"; silently skip otherwise. 
+ """ + allowedChars = set( + string.uppercase + + string.lowercase + + string.digits + + '.,;:!?_-+/!@#$%*()=[]\\\'"| \t\n\r') + knownMappings = {'&':'&', '<':'<', '>':'>'} + + chars = list(txt) + for (i,c) in enumerate(chars): + if c not in allowedChars: + cc = ord(c) + if 0x20> + 0x00AB: u'"', # quotation mark << + 0x2039: u'"', # quotation mark > + 0x203A: u'"', # quotation mark < + 0x2022: u'*', # bullet point + 0x2032: u"'", # prime + 0x2033: u"''", # double prime + 0x0060: u"'", # inverted prime (`) + 0x02DD: u'"', # double acute accent + 0x02DC: u'~', # small tilde + 0x00A6: u'|', # broken bar + 0x2026: u'...', # ellipsis + 0x0133: u'ij', # ligature + 0xFB00: u'ff', # ligature + 0xFB01: u'fi', # ligature + 0xFB02: u'fl', # ligature + 0xFB03: u'ffi', # ligature + 0xFB04: u'ffl', # ligature + 0xFB06: u'st', # ligature + # The following codepoints are not defined in unicode. However, UnicodeDammit leaves them in the + # text sometimes. Assume they come from Windows-1252, map accordingly. + 0x0091: u"'", # quotation mark - single + 0x0092: u"'", # quotation mark - single + 0x0082: u"'", # quotation mark - single + 0x0084: u'"', # quotation mark + 0x0093: u'"', # quotation mark + 0x0094: u'"', # quotation mark + 0x0095: u'*', # bullet point + 0x0096: u'-', # en dash + 0x0097: u' -- ', # em dash + 0x0085: u'...', # ellipsis + } +_normalizedWhitespace = { + 0x000A: u' ', # \n + 0x000D: u' ', # \r + 0x0009: u' ', # \t + } +def normalizePunctuation(txt, normalizeWhitespace=False): + """ + Maps "exotic" unicode codepoints into their ASCII couterparts. For example, + em and en dash get mapped to a simple dash, smart quotes to '"', ellipsis + gets expanded etc. See source for details. + + If normalizeWhitespace is given, also maps all whitespace (incl newlines) to spaces. + """ + if normalizeWhitespace: + mapping = _normalizedPunctuation.copy() + mapping.update(_normalizedWhitespace) + else: + mapping = _normalizedPunctuation + return unicode(txt).translate(mapping) + + +def iso_utc_time(t): + "ISO string representing the UTC variant of a given datetime object." + return datetime.datetime.utcfromtimestamp(time.mktime(t.timetuple())).isoformat()+'Z' + + +def unique(lst, sorted=False): + """ + Return an iterator over the input list; only the first instance of each + multiple entry is returned. If sorted==True is given implying that the + input sequence is already sorted, this only affects performance, not the semantics. + """ + if sorted: + ilst = iter(lst) + lastSeen = ilst.next() + yield lastSeen + for el in ilst: #remaining elements + if el==lastSeen: + continue + lastSeen = el + yield el + else: + seen = set() + addToSeen = seen.add + for el in lst: + if not hasattr(el,'__hash__') or el.__hash__==None: + seen = list(seen) + addToSeen = seen.append + if el not in seen: + addToSeen(el) + yield el + + +def decodeText_simple(text, headers): + """ + Takes a HTTP response body (=text) and the corresponding headers (a dict or dict-like + object; httplib.HTTPResponse will do); + outputs the text as a unicode string. The encoding is guessed using a combination of + HTTP headers and the META ta inside HTML. If no encoding can be inferred, latin1 is assumed. + Characters that can't be decoded are left as-is. + Throws ValueError if headers do not indicate a text/* mime-type. + + Does not use any extra libraries, unlike decodeText(), which is more accurate. 
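    For example, with `body` standing in for a raw response whose charset is announced
    only in the HTTP header (no META tag):

        decodeText_simple(body, {'content-type': 'text/html; charset=utf-8'})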
+ """ + contentType = headers.get('content-type','text/html; charset=latin1') + if not contentType.startswith('text/'): + raise ValueError, "Can only convert HTTP responses with mime type text/*; got '%s' instead" % contentType + + # try to find the encoding in a meta tag (the regexp below does not cover all instances, but it's close) + m = re.search(''' ]+ ) + ''', text, re.IGNORECASE | re.VERBOSE) + if not m: + # no luck with META tags; try HTTP headers + m = re.search('charset=([\w0-9\-]+)', contentType) + + if m: + charset = m.group(1).replace('windows-','cp') + else: + charset='latin1' + + return text.decode(charset, 'ignore') + + +class MimeTypeError(ValueError): + pass + +def decodeText(txt, headers=None): + """ + Takes a HTTP response body (=text) and the corresponding HTTP headers (a dict or dict-like + object; httplib.HTTPResponse will do; see parseHttpHeaders() if you have a string); + outputs the text as a unicode string. The encoding is guessed using BeautifulSoup.UnicodeDammit + (which in turn uses chardet if installed), enhanced by the HTTP-suggested encoding. + + Raises MimeTypeError (subclass of ValueError) if headers do not indicate a text/* mime-type. + """ + from BeautifulSoup import UnicodeDammit + + # guess the charset suggested by HTTP headers + httpCharset = [] + if headers: + contentType = headers.get('content-type','') + + if not contentType.startswith('text/'): + raise MimeTypeError("Can only decode text documents (mime type text/*; got %s)" % contentType) + + m = re.search('charset=([\w0-9\-]+)', contentType) + if m: + httpCharset = [ m.group(1).replace('windows-','cp') ] + + ud = UnicodeDammit(txt, isHTML=True, overrideEncodings=httpCharset) # overrideEncodings is not enforced by UnicodeDammit, it's just tried + return ud.unicode + + +def parseHttpHeaders(headersTxt): + """ + Takes HTTP headers and parses them into a dict. Keys and values are lowercased. + """ + res = {} + for line in headersTxt.splitlines(): + if ':' not in line: + continue + key, val = line.split(':',1) + key = key.strip().lower() + val = val.strip().lower() + res[key] = val + return res + + +class Request2(urllib2.Request): + + def __init__(self, url, data=None, headers={}, + origin_req_host=None, unverifiable=False): + # unwrap('') --> 'type://host/path' + self.__original = unwrap(url) + self.type = None + # self.__r_type is what's left after doing the splittype + self.host = None + self.port = None + self.data = data + self.headers = {} + for key, value in headers.items(): + self.add_header(key, value) + self.unredirected_hdrs = {} + if origin_req_host is None: + origin_req_host = request_host(self) + self.origin_req_host = origin_req_host + self.unverifiable = unverifiable + +def readUrl(url, silent=True, unicodeIfPossible=True): + """ + Retrieve the contents of the specified HTTP URL. + In case of a text/* MIME, decodes it using the encoding from the HTTP headers + (META is ignored) and returns an unicode string. If MIME is different or + unicodeIfPossible==False is given, returns a byte string containing the original content, non-decoded. + Unless silent==True, all errors are ignored silently and an empty string is returned. + """ + + try: + from uastrings import userAgentStrings + except: + print 'WARNING - module uastrings.py not found. Function util.readUrl will now use a fixed user-agent string.' 
+ userAgentStrings = ['Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.9.0.6) Gecko/2009011913 Firefox/3.0.6'] + + req = urllib2.Request(url=url) + req.add_header('user-agent', random.choice(userAgentStrings)) + #req.add_header('referer', 'http://news.google.com') + #req.add_header('connection', 'keep-alive') # doesn't work + #req.add_header('keep-alive','300') + req.add_header('accept-language', 'en-us,en;q=0.5') + req.add_header('accept-encoding', 'gzip,deflate') + req.add_header('accept', 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8') + req.add_header('accept-charset','ISO-8859-1,utf-8;q=0.7,*;q=0.7') + + try: + t = time.time() + f = urllib2.urlopen(req) + content = f.read() + if not silent: + print 'Fetched %d bytes in %.3f seconds (%s)' % (len(content), time.time()-t, url) + + # if content is compressed, decompress it + if f.headers.get('content-encoding') == 'gzip': + contentIO = StringIO.StringIO(content) + gzipFile = gzip.GzipFile(fileobj=contentIO) + content = gzipFile.read() + + # enforce unicode + if unicodeIfPossible: + try: content = decodeText(content, f.headers) + except ValueError: pass + return content + except: + import traceback, sys + traceback.print_exc(file=sys.stderr) + if silent: + return '' + else: + raise + + +def levenshtein(first, second): + """Find the Levenshtein distance between two strings.""" + if len(first) > len(second): + first, second = second, first + if len(second) == 0: + return len(first) + first_length = len(first) + 1 + second_length = len(second) + 1 + distance_matrix = [range(second_length) for x in range(first_length)] + for i in range(1, first_length): + for j in range(1, second_length): + deletion = distance_matrix[i-1][j] + 1 + insertion = distance_matrix[i][j-1] + 1 + substitution = distance_matrix[i-1][j-1] + if first[i-1] != second[j-1]: + substitution += 1 + distance_matrix[i][j] = min(insertion, deletion, substitution) + + return distance_matrix[first_length-1][second_length-1] + + +def findAll(lst, el): + """ + Returns a list of positions of all occurences of el in list lst. + """ + pos = [] + next = 0 + while True: + try: + next = string.index(substr,next)+1 + pos.append(next-1) + except: + return pos + + +def writeToFile(data, fn): + """ + Fills the file fn with data data. + If fn already exists, it is overwritten, otherwise created. + Data is utf-8 encoded prior to writing if needed. + """ + f = open(fn, 'w') + if isinstance(data,unicode): + data = data.encode('utf8') + f.write(data) + f.close() + +def log_calls(func): + """ + A function decorator that prints each invocation of the decorated function + (along with the arguments) to stdout. + """ + def logged_func(*args, **kwargs): + log = (">> %s(" % func.__name__) + ', '.join(map(repr,args)) + if kwargs: log += ", "+", ".join("%s=%r" % kv for kv in kwargs.items()) + log += ")" + print log + return func(*args, **kwargs) + return logged_func + +def restart_on_crash(log_exprs=[]): + """ + A function decorator that re-runs the wrapped function in case it raises an exception. + This is repeated until the function succeeds. + + `log_exprs` is a list of strings, each string being an expression whose value at the time + of exception is displayed. Example: + >>> @restart_on_crash(log_exprs=['b', 'a+b']) + >>> def divider(a): + >>> import random; random.seed(time.time()) + >>> for t in range(5): + >>> print a, 'divided by', b, 'is', a/b + >>> print 'done' + + The error report is also written to a (hardcoded) file. 
+ """ + def decorator(func): + REPORT_FILE = os.path.abspath('./_crash_report.txt') + def wrapped_func(*args, **kwargs): + alles_gut = False + while not alles_gut: + try: + func(*args, **kwargs) + alles_gut = True + except: + print '%s() was restarted at %s because of the following error:' % (func.func_name, datetime.datetime.now().isoformat()) + traceback.print_exc() + + try: + # find the most nested invocation of `func` in the traceback + func_frame = None + tb = sys.exc_info()[2] + while True: + if tb.tb_frame.f_code == func.func_code: + func_frame = tb.tb_frame + if not tb.tb_next: break + tb = tb.tb_next + # evaluate the expression-to-be-logged in the scope of func + with open(REPORT_FILE, 'w') as f: + f.write('Crash in function %s at %s\n\n' % (func.func_name, datetime.datetime.now().isoformat())) + traceback.print_exc(file=f) + f.write('\n\nLogged variables/expressions:\n') + for log_expr in log_exprs: + try: log_val = repr(eval(log_expr, globals(), func_frame.f_locals)) + except: log_val = '(error while evaluating expression; %r)' % sys.exc_info()[1] + f.write('>>> %s: %s\n' % (log_expr, log_val)) + print 'More info can be found in %r' % REPORT_FILE + except: + print 'Additionally, an error was encountered trying to write the crash report to %r:' % REPORT_FILE + traceback.print_exc() + return wrapped_func + return decorator diff --git a/zmq2http_all.py b/zmq2http_all.py new file mode 100755 index 0000000..05e0d79 --- /dev/null +++ b/zmq2http_all.py @@ -0,0 +1,270 @@ +#!/usr/bin/env python +""" +Listens to incoming cleartext and enryched documents on zmq; +packages them into gzip files; +makes those files available via HTTP. + +The HTTP interface is public facing and the final stage of the pipeline. + +ZMQ subscribes to: zmq2zmq_enrych.py +ZMQ subscribers: none; HTTP is used from here on +""" + +import sys, os +sys.path.extend(('.', '..')) + +import threading +import traceback, errno +import datetime, time +import gzip +import re +import glob +import zmq +import bottle + +import util +import serialize + +# Apache's .htpasswd file with extra information: ACL tags users can access +HTPASSWD_FILE = '../newsfeed_users.htpasswd' + +# The cache of gzipped files +MAX_PRECACHE_SIZE = 10 * 1024**2 # in bytes; files larger than this are gzipped and made available +MAX_PRECACHE_AGE = 1800 # in seconds; files older than this are gzipped and made available +MAX_CACHE_AGE = 365 # in days; files older than this get deleted from cache +CACHE_DIRECTORY = 'cache.v%s' % serialize.FORMAT_VERSION +FILENAME_TEMPLATE = CACHE_DIRECTORY+'/%(acl_tag)s/news-%(time)s.xml' + + +def compress_gzip(path): + """ + Compress file with path `path` to `path`.gz with gzip. Delete `path` from disk. + """ + f_in = open(path, 'rb') + f_out = gzip.GzipFile(os.path.split(path)[1], mode='wb', fileobj=open(path+'.gz', 'wb')) + while True: + buf = f_in.read(1024*1024) # 1MB + if not buf: break + f_out.write(buf) + f_out.close() + f_in.close() + + # If we get here with no exception, the original is safe to delete + os.remove(path) + +def makedirs(path): + "Create all directories on `path` as needed. If `path` already exists, do not complain." + try: + os.makedirs(path) + except OSError as exc: + if exc.errno != errno.EEXIST: raise # EEXIST is the expected cause of exception; ignore it + +def write_and_rotate(f, fn_template, data, root_xml_tag='article-set'): + """ + Write string `data` to filehandle `f`. If `f` is None or closed, + write into a new file with path `fn_template`. 
`fn_template` + can contain %(time)s which gets replaced with current timestamp. + If the file grows over `MAX_PRECACHE_SIZE` or older than `MAX_PRECACHE_AGE`, + it get gzipped. + + If `root_xml_tag` is given, makes sure each file is wrapped in an + xml element of that name and that header is present. + + Returns the file handle of the file `data` was actually written + into. + """ + if f is None or f.closed: + fn = fn_template % { + 'time': util.iso_utc_time(datetime.datetime.now()).replace(':','-') } + makedirs(os.path.split(fn)[0]) + f = open(fn, 'wb') + if root_xml_tag: + f.write('\n') + f.write('<%s format-version="%s">\n' % (root_xml_tag, serialize.FORMAT_VERSION)) + + f.write(data) + f.flush() + + m = re.search(r'\d\d\d\d-\d\d-\d\dT\d\d-\d\d-\d\dZ', f.name) + try: + # Parse the timestamp from the filename. Forgive ye oh us sinners. + file_start = time.mktime(datetime.datetime(*map(int, re.split('[^\d]', m.group(0))[:-1])).timetuple()) + file_age = time.mktime(datetime.datetime.utcnow().timetuple()) - file_start # time.time() is not OK because of timezones + except: + print 'Warning: could not parse filename %r. Traceback follows.' % f.name + traceback.print_exc() + file_age = -1 + + if f.tell() > MAX_PRECACHE_SIZE or file_age > MAX_PRECACHE_AGE: + if root_xml_tag: + f.write('' % root_xml_tag) + f.close() + compress_gzip(f.name) + print 'created', f.name+'.gz' + + return f + +@util.restart_on_crash(log_exprs=['article','active_f','acl_tag']) +def zmq_to_files(): + """ + Infinite loop: listen on the zmq socket for cleartext and rych docs, + pack them into gzip files. + Articles are first accumulated in *.xml files; once those grow over 10MB, + they are turned into *.gz. + These files are collected in subdirectories of `CACHE_DIRECTORY`: for each ACL tag + associated with the article, the article is stored into CACHE_DIRECTORY/acl_tag/ into + a file as described above. + """ + sock_in = zmqctx.socket(zmq.SUB) + sock_in.connect ("tcp://kopernik.ijs.si:13374") # output sockets: 13371=cleartext, 13372=enryched, 13373=xlike-enryched, 13374=bloombergness + sock_in.setsockopt(zmq.SUBSCRIBE, "") + + active_f = {} # XML output files. acl_tag -> active file object + + while True: + global article + article = sock_in.recv_pyobj() + + # hackish: ignore outdated articles + age_days = (datetime.datetime.now() - (article['publish_date'] or article['found_date']).replace(tzinfo=None)).days + if age_days > 7: + print 'skipping %s (%s, %d days old)' % (article['id'], (article['url']+'/FAKE/FAKE/').split('/')[2], age_days) + continue + + # write the article for each ACL tag + for acl_tag in article.get('acl_tagset'): + fn_template = FILENAME_TEMPLATE.replace('%(acl_tag)s',acl_tag) # hack: partial string interpolation + + # Get the file object into which we have to write this article. 
(None if no such .xml exists yet) + if not active_f.get(acl_tag): + # Reuse the xml file from the previous run, if any + fn = get_cached_file(fn_template, reverse_order=True) + active_f[acl_tag] = open(fn, 'ab') if fn else None + + # write the article + print 'processing %s (ACL=%s; %s%s%s)' % (article['id'], acl_tag, 'txt'*int('cleartext' in article), ' rych'*int('rych' in article), ' xrych'*int('xrych' in article)) + xml = serialize.xml_encode(article)+"\n" + active_f[acl_tag] = write_and_rotate(active_f[acl_tag], fn_template, xml) + + +def get_cached_file(fn_template, after='0000-00-00T00-00-00Z', reverse_order=False): + """ + Get the alphabetically (= chronologically) first file whose path + fits `fn_template` but has the date component larger than `after` + (an ISO timestamp string); + the template should contain "%(time)s". Returns filename or None + if no such file exists. + At the same time, deletes any file that fits the template and is + older than `MAX_CACHE_AGE`. + If `reverse_order` is given, return the newest matching file instead + of the oldest. + """ + after = after.replace(':','-') # normalize + + def file_age(fn): + "Age of file in days. Assumes the filename template from outer scope." + match = re_date.search(fn) + time_parts = map(int, match.groups()[:-1]) + file_time = datetime.datetime(*time_parts) + return (datetime.datetime.utcnow() - file_time).days + + re_date = re.compile(re.escape(fn_template).replace( + re.escape("%(time)s"), + r"(\d\d\d\d)-(\d\d)-(\d\d)T(\d\d)-(\d\d)-(\d\d)(?:[-.]\d+)?Z")) + glob_pattern = fn_template.replace("%(time)s", "*") + fns = sorted(fn.replace('\\','/') for fn in glob.glob(glob_pattern)) # glob with windows compatibility + fns = [fn for fn in fns if re_date.search(fn)] # only keep dates that really match the pattern + + # delete obsolete cache entries + while fns and file_age(fns[0]) > MAX_CACHE_AGE: + os.remove(fns[0]) + del fns[0] + print 'Deleted %s from cache' % fns[0] + + # get the requested file + newer_files = [fn for fn in fns if fn > fn_template%{'time':after}] + #print 'In cache (%s from %s): %s' % (glob_pattern, os.getcwd(), fns) + #print 'User wants newer than', fn_template%{'time':after} + #print 'New enough:', newer_files + if not newer_files: + return None + return newer_files[0 if not reverse_order else -1] + +def authenticate(user, password, acl_tag): + """ + Return True iff `user`:`password` is a valid combination and `user` has access to + articles tagged with `acl_tag`. + Configuration is read from `HTPASSWD_FILE`. If a line *immediately following* a user's + line is of the form "#acl acltag1,acltag2,...,acltagN", user is granted access to these acl tags. + """ + # get the info about this user from htpasswd + with open(HTPASSWD_FILE) as f: lines = f.readlines() + user_lines = [ + (line.strip(), next_line.strip()) for (line, next_line) in zip(lines, lines[1:]+['']) + if not line.strip().startswith('#') and line.lower().startswith((user or '').lower()+':')] + + # parse the info from the file + if not user_lines: + print 'UNKNOWN USER: %r' % user + return False + line, next_line = user_lines[0] + correct_password = line.split(':',1)[1] + if next_line.startswith("#acl "): + allowed_acl_tags = [tag.strip() for tag in next_line[len("#acl"):].split(',')] + else: + allowed_acl_tags = [] # no explicitly allowed ACL tags + + # check if permissions are OK. 
Every registered user has implicit access to the 'public' ACL tag + return password==correct_password and acl_tag in allowed_acl_tags+['public'] + +@bottle.route('/') +@bottle.route('/:acl_tag') +@bottle.route('/:acl_tag/') +def http_serve_stream(acl_tag='public'): + """ + Return, as an HTTP binary file, the oldest(!) file for the given ACL tag (e.g. "public"). + Takes an optional GET parameter "after"; only files created after this timestamp are considered. + """ + # Authorization. + try: + username = password = None + assert bottle.request.auth is not None, "No 'Authorization' HTTP header or 'HTTP_AUTHORIZATION' environment variable given." + username, password = bottle.request.auth + assert authenticate(username, password, acl_tag) + except Exception, e: + print 'DENIED REQUEST: http authorization token %r (user %r, password %r) requested acl_tag %r. Traceback follows.' % ( + bottle.request.environ.get('HTTP_AUTHORIZATION'), username, password, acl_tag) + traceback.print_exc() + return bottle.HTTPResponse("You don't have the permission to access this stream", status=401) + else: + print 'GRANTED: user %r, acl_tag %r' % (username, acl_tag) + + after = bottle.request.GET.get('after','0000-00-00T00-00-00Z') + fn_template = FILENAME_TEMPLATE.replace('%(acl_tag)s',acl_tag) # hack: partial string interpolation + if not fn_template: + return bottle.HTTPResponse( + "

    <h1>404</h1>

    Unknown stream: %s. Check http://newsfeed.ijs.si/ for possible URLs.", + status=404) + + path = get_cached_file(fn_template=fn_template+'.gz', after=after, reverse_order=(after==None)) + if path is None: + return bottle.HTTPResponse("

    <h1>404</h1>

    No gzips created after %s on stream %r yet." % (util.xmlEscape(after), acl_tag), status=404)
+    else:
+        dir, fn = os.path.split(path)
+        return bottle.static_file(fn, root=dir, download=acl_tag+'-'+fn, mimetype='application/x-gzip')
+
+
+if __name__=='__main__':
+    zmqctx = zmq.Context()
+    # (the socket is created in its own thread; context should be created in the main thread)
+
+    # Debug only: uncomment either of the two below for a single-threaded run
+    #zmq_to_files(); 1/0
+    #bottle.debug(True); bottle.run(host='0.0.0.0', port=13380); 1/0
+
+    # zmq subscriber
+    threading.Thread(target=zmq_to_files).start()
+
+    # http server
+    bottle.debug(True)
+    threading.Thread(target=bottle.run, kwargs={'host':'0.0.0.0', 'port':13380}).start()
diff --git a/zmq2zmq_bloombergness.py b/zmq2zmq_bloombergness.py
new file mode 100755
index 0000000..08bf4b7
--- /dev/null
+++ b/zmq2zmq_bloombergness.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+"""
+Subscribes to the zmq feed of enryched articles.
+For articles that look Bloomberg-scorable (German, or from a
+bloomberg-hosted source), queries an external scoring service and
+stores the result in the 'bloomberg_score' attribute, then
+republishes every article to a new zmq socket.
+
+ZMQ subscribes to: zmq2zmq_xenrych.py
+ZMQ subscribers: zmq2http_all.py
+"""
+
+import os, sys, traceback
+import urllib2
+import time
+import zmq
+import re
+
+sys.path.extend(('.', '..'))
+import serialize
+#from cleanDb import openConnection
+
+def is_bloomberg_scorable(article):
+    return article.get('lang')=='deu' or 'bloomberg' in article.get('source_hostname', '')
+
+def add_bloomberg_score(article):
+    """
+    Adds a new attribute, 'bloomberg_score', to `article`. Returns None.
+    Uses Andrej Muhic's MATLAB service.
+    On failure, leaves `article` unchanged.
+    """
+    try:
+        http_data = '\n'+serialize.xml_encode(article)+'\n'
+        # debug probe: log the raw service response (the request is repeated below for the actual value)
+        try:
+            req = urllib2.Request(url='http://xling.ijs.si:9000/bloombergostxml', data=http_data)
+            print 'XX:', `urllib2.urlopen(req, timeout=1).read()`
+        except Exception as e:
+            print 'XX:', `e`
+        req = urllib2.Request(url='http://xling.ijs.si:9000/bloombergostxml', data=http_data)
+        f = urllib2.urlopen(req, timeout=1)
+        retval = f.read().decode('utf8','replace')
+        print retval
+        article['bloomberg_score'] = retval
+    except:
+        traceback.print_exc()
+
+if __name__=='__main__':
+    zmqctx = zmq.Context()
+
+    sock_txt = zmqctx.socket(zmq.SUB)
+    sock_txt.connect ("tcp://localhost:13373")
+    sock_txt.setsockopt(zmq.SUBSCRIBE, "")
+
+    sock_rych = zmqctx.socket(zmq.PUB)
+    sock_rych.setsockopt(zmq.HWM, 100)
+    sock_rych.bind('tcp://*:13374')
+
+    try:
+        while True:
+            while not zmq.select([sock_txt], [], [], 3)[0]:
+                time.sleep(.1)
+            article = sock_txt.recv_pyobj()
+
+            if is_bloomberg_scorable(article):
+                print 'processing %r' % (article['id'], )
+                add_bloomberg_score(article)
+            else:
+                print '(%s %s)' % (article['id'], article['lang'])
+
+            sock_rych.send_pyobj(article)
+    except:
+        traceback.print_exc()
+    finally:
+        sock_txt.close()
+        sock_rych.close()
+        zmqctx.term()
diff --git a/zmq2zmq_enrych.py b/zmq2zmq_enrych.py
new file mode 100755
index 0000000..34aa902
--- /dev/null
+++ b/zmq2zmq_enrych.py
@@ -0,0 +1,163 @@
+#!/usr/bin/env python
+
+"""
+Subscribes to the zmq feed of cleartexted articles.
+Pushes them (multithreaded) to enrycher, publishes
+enryched documents to a new zmq socket.
+Also extracts and dumps some selected info from enrycher output
+back into the feed_article_meta table in DB.
+
+ZMQ subscribes to: db2zmq_cleartext.py
+ZMQ subscribers: zmq2zmq_xenrych.py
+"""
+
+import os, sys, traceback
+import urllib2
+import threading
+import time
+from Queue import Queue
+import zmq
+import re
+import random
+
+sys.path.extend(('.', '..'))
+from cleanDb import openConnection
+from db2zmq_cleartext import lat_lon_dict
+
+# max number of concurrent requests
+MAX_ENRYCHER_REQUESTS = 10
+
+def is_enrychable(article):
+    "A pipeline filter; articles for which this returns True get enryched."
+    return (article['lang'] or 'xx').split('-')[0] in ('en','eng','enz','slv','sl') and article.get('cleartext') and len(article['cleartext'])<50000
+
+def enrych(txt, url):
+    """
+    Process plaintext `txt` (unicode or utf8) with enrycher;
+    return resulting xml (unicode).
+    `url` is the URL at which Enrycher lives.
+    """
+    if isinstance(txt, unicode):
+        txt = txt.encode('utf8', 'ignore')
+    http_data = txt.lstrip().replace('\n','\n\n')
+
+    req = urllib2.Request(url=url, data=http_data)
+    f = urllib2.urlopen(req, timeout=3)
+    return f.read().decode('utf8','replace')
+
+
+def DB_write_rych_info(cur, article):
+    """
+    Parse enrycher-mentioned geographical entities, add their coords to the DB.
+    Also, extend the 'geo' attribute of `article`.
+    """
+    geo_ids = map(int, re.findall(r'resource="http://sws.geonames.org/(\d+)/"', article['rych']))
+    if geo_ids:
+        cur.execute("SELECT geo FROM feed_article_meta WHERE id=%s AND geo IS NOT NULL UNION SELECT latitude::text||' '||longitude::text AS geo FROM geonames WHERE id IN (%s)" % (article['id'], ','.join(map(str, geo_ids)),) )
+        geo_coords = [row['geo'] for row in cur]
+        # update the DB
+        cur.execute("UPDATE feed_article_meta SET geo=%s WHERE id=%s", (';'.join(geo_coords), article['id'],))
+        cur.connection.commit()
+        # update the zmq object; `geo_coords` includes the old entries, so we just override
+        article['geo'] = map(lat_lon_dict, geo_coords)
+
+
+def enrycher_worker(in_queue, out_queue, url=None):
+    """
+    Worker thread. Takes an article dict from in_queue, adds the enrycher xml,
+    puts the enryched article in out_queue.
+    If `url` is given, queries Enrycher at that URL, otherwise the URL is constructed
+    based on the language of each article in in_queue.
+    """
+    conn, cur = openConnection('rych info writer')
+    fixed_url = url  # remember the caller-supplied URL, if any
+    while True:
+        try:
+            url = fixed_url  # re-detect the URL for every article unless one was given
+            article = in_queue.get()
+            lang = article.get('lang','').split('-')[0]
+
+            # auto-detect URL
+            if not url:
+                if lang in ('en','eng','enz'):
+                    if 0 and article.get('google_story_id'):
+                        url = 'http://aidemo.ijs.si:8080/EnrycherWeb-render/run-render' # all + stanford parses + sentiment
+                    else:
+                        url = 'http://aidemo.ijs.si:8080/EnrycherWeb-render/run-demo'
+                elif lang in ('sl','slv'):
+                    url = 'http://aidemo.ijs.si:8080/EnrycherWeb-render/sl-run'
+                else:
+                    raise ValueError('Unsupported language: %r' % lang)
+
+            #print '[%s] pre-enrych %s' % (threading.currentThread().name, article['id'])
+            #print article['id'], lang, `article.get('google_story_id')`, url
+            article['rych'] = enrych(article['cleartext'], url)
+            #print '[%s] pre-db %s' % (threading.currentThread().name, article['id'])
+            DB_write_rych_info(cur, article)
+            #print '[%s] pre-out-enqueue %s' % (threading.currentThread().name, article['id'])
+            out_queue.put(article)
+
+        except Exception as exc:
+            # pass through the unenryched article
+            out_queue.put(article)
+
+            # report error
+            print '!! error while processing article %s (lang %s) at %r' % (article.get('id'), article.get('lang'), url)
+            txt = article.get('cleartext', '').replace('\n',' ')
+            print 'Some stats about the input data: %d bytes, %d sentences, max sentence length %d bytes. File saved to /tmp/bad_enrycher_input' % (
+                len(txt), len(txt.split('. ')), max(map(len,txt.split('. '))+[-1]) )
+            print exc, exc.args
+            try:
+                with open('/tmp/bad_enrycher_input','w') as badf:
+                    badf.write(txt.encode('utf8'))
+            except:
+                print '(file not saved, IOError)'
+
+if __name__=='__main__':
+    zmqctx = zmq.Context()
+
+    sock_txt = zmqctx.socket(zmq.SUB)
+    sock_txt.connect ("tcp://localhost:13371")
+    sock_txt.setsockopt(zmq.SUBSCRIBE, "")
+
+    sock_rych = zmqctx.socket(zmq.PUB)
+    sock_rych.setsockopt(zmq.HWM, 100)
+    sock_rych.bind('tcp://*:13372')
+
+    # input and output queues for worker threads that call enrycher. (zmq is only used in the main thread)
+    in_queue = Queue(maxsize=MAX_ENRYCHER_REQUESTS)
+    out_queue = Queue(maxsize=100*MAX_ENRYCHER_REQUESTS)
+
+    # prepare worker threads
+    for i in range(MAX_ENRYCHER_REQUESTS):
+        worker = threading.Thread(target=enrycher_worker, args=(in_queue,out_queue))
+        worker.start()
+
+    try:
+        while True:
+            if in_queue.full():
+                print 'sleep ... %d:%d ...' % (in_queue.qsize(), out_queue.qsize(),),
+                time.sleep(1)
+                print '!'
+
+            if not in_queue.full() and zmq.select([sock_txt], [], [], 3)[0]:
+                article = sock_txt.recv_pyobj()
+                if is_enrychable(article):
+                    print 'enqueued %s (lang=%r)' % (article['id'], article['lang'])
+                    print '%d:%d' % (in_queue.qsize(), out_queue.qsize(),),
+                    in_queue.put(article)
+                else:
+                    print 'ignored %s (lang=%r)' % (article['id'], article['lang'])
+                    print '%d:%d' % (in_queue.qsize(), out_queue.qsize(),),
+                    out_queue.put(article)
+
+            while not out_queue.empty():
+                article = out_queue.get()
+                print '%d:%d' % (in_queue.qsize(), out_queue.qsize(),),
+                if 'rych' in article:
+                    print 'done %s, %d bytes of xml' % (article['id'], len(article['rych']))
+                sock_rych.send_pyobj(article)
+    except:
+        traceback.print_exc()
+    finally:
+        sock_txt.close()
+        sock_rych.close()
+        zmqctx.term()
diff --git a/zmq2zmq_xenrych.py b/zmq2zmq_xenrych.py
new file mode 100755
index 0000000..2b47374
--- /dev/null
+++ b/zmq2zmq_xenrych.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+
+"""
+Subscribes to the zmq feed of enryched articles.
+Sends articles in supported languages (Catalan, English, Spanish)
+to the XLIKE analysis services, stores the result in the 'xrych'
+attribute, then republishes every article to a new zmq socket.
+
+ZMQ subscribes to: zmq2zmq_enrych.py
+ZMQ subscribers: zmq2zmq_bloombergness.py
+"""
+
+import os, sys, traceback
+import urllib,urllib2
+import time
+import zmq
+import re
+import lxml.etree as etree
+
+sys.path.extend(('.', '..'))
+import serialize
+
+def build_xlike_url(article):
+    """
+    Return the URL to which to send `article`, depending on article['lang'].
+    **Return None** if the article language is not supported.
+    """
+    #return None
+    xlang = {'cat':'ca', 'eng':'en', 'spa':'es'}.get(article.get('lang'))
+    if not xlang: return None
+    return 'http://sandbox-xlike.isoco.com/services/analysis_%s/analyze' % xlang
+
+# sample article text for manual testing
+tst="""ON a rainy day in the
+late 17th century, an enterprising agent of the British East India
+Company named Job Charnock sailed along the Hooghly River, a tributary
+of the Ganges that flows from high in the Himalayas into the Bay of
+Bengal, and pitched a tent on its swampy banks. The company bought three
+riverside villages. Soon they would become a port - flowing with opium,
+muslin and jute - and then, as the capital of British India until 1912,
+draw conquerors, dreamers and hungry folk from all over the world.
+"""
+
+def add_xenrycher_data(article):
+    """
+    Query XLIKE enrycher-like services to obtain the rych version of article.
+    If things go well, store the rych version in article['xrych'].
+    Returns None.
+    """
+    try:
+        # build query string
+        query = urllib.urlencode({'text': article['cleartext'].encode('utf8','replace')}).replace('+','%20')
+        # do the request
+        req = urllib2.Request(url=build_xlike_url(article), data=query)
+        f = urllib2.urlopen(req, timeout=1)
+        retval = f.read().decode('utf8','replace')
+        # reformat the XML
+        try: retval = etree.tostring(etree.fromstring(retval), pretty_print=True)
+        except: pass
+        article['xrych'] = retval
+        print 'OK, %d bytes of xrych xml' % len(retval or '')
+    except:
+        import tempfile
+        traceback.print_exc()
+        #tmp_path = tempfile.mktemp(prefix='isoco', suffix='.tmp')
+        #with open(tmp_path,'w') as f:
+        #    f.write(query)
+        #print 'written', tmp_path
+
+if __name__=='__main__':
+    zmqctx = zmq.Context()
+
+    sock_txt = zmqctx.socket(zmq.SUB)
+    sock_txt.connect ("tcp://localhost:13372")
+    sock_txt.setsockopt(zmq.SUBSCRIBE, "")
+
+    sock_rych = zmqctx.socket(zmq.PUB)
+    sock_rych.setsockopt(zmq.HWM, 100)
+    sock_rych.bind('tcp://*:13373')
+
+    try:
+        while True:
+            while not zmq.select([sock_txt], [], [], 3)[0]:
+                time.sleep(.1)
+            article = sock_txt.recv_pyobj()
+
+            if build_xlike_url(article):
+                print 'processing %r (%s)' % (article['id'], article['lang'],)
+                add_xenrycher_data(article)
+            else:
+                print '(%s %s)' % (article['id'], article['lang'])
+
+            sock_rych.send_pyobj(article)
+    except:
+        traceback.print_exc()
+    finally:
+        sock_txt.close()
+        sock_rych.close()
+        zmqctx.term()
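
Note: the files above form a linear ZMQ pipeline: db2zmq_cleartext.py -> zmq2zmq_enrych.py (binds tcp://*:13372) -> zmq2zmq_xenrych.py (binds tcp://*:13373) -> zmq2zmq_bloombergness.py (binds tcp://*:13374) -> zmq2http_all.py. As a minimal sketch of how a downstream consumer attaches to the end of that pipeline: the port number and the article fields ('id', 'lang', 'bloomberg_score') are taken from the scripts in this changeset, while the script name and everything else is hypothetical and not part of the diff.

# consume_final_feed.py -- illustrative sketch only, not part of the changeset above.
# Subscribes to the PUB socket bound by zmq2zmq_bloombergness.py (tcp://*:13374)
# and prints a one-line summary per article. Assumes the same pickled-dict message
# format the scripts above exchange via send_pyobj()/recv_pyobj().
import zmq

if __name__ == '__main__':
    zmqctx = zmq.Context()
    sock = zmqctx.socket(zmq.SUB)
    sock.connect("tcp://localhost:13374")
    sock.setsockopt(zmq.SUBSCRIBE, "")   # no topic filter: receive every article
    try:
        while True:
            article = sock.recv_pyobj()  # blocks until the next article arrives
            print '%s lang=%s bloomberg_score=%r' % (
                article.get('id'), article.get('lang'), article.get('bloomberg_score'))
    finally:
        sock.close()
        zmqctx.term()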