From 692ab47448afcc428dcb84cc8ef7f6748a1c8f86 Mon Sep 17 00:00:00 2001
From: Mart Lubbers
Date: Thu, 2 Oct 2014 11:57:30 +0200
Subject: [PATCH] update

---
 program/everything/contextmenu_o.js | 23 +++++++++++++++++------
 program/everything/crawler.py       | 15 ++++++++++++---
 program/everything/index.py         | 22 ++++++++++++----------
 program/everything/input_app.py     | 14 ++++++++++----
 program/everything/main.html.t      |  2 --
 program/everything/new.html.t       |  6 +++++-
 6 files changed, 56 insertions(+), 26 deletions(-)

diff --git a/program/everything/contextmenu_o.js b/program/everything/contextmenu_o.js
index b04c2b2..b7b6d1f 100644
--- a/program/everything/contextmenu_o.js
+++ b/program/everything/contextmenu_o.js
@@ -87,13 +87,24 @@ function f_clear() {
     }
 }
 
-function post_all() {
+function post_all(num) {
     var xmlhttp = new XMLHttpRequest();
-    xmlhttp.onreadystatechange=function()
-    {
-        if (xmlhttp.readyState==4)
+    if(num == 1){
+        xmlhttp.onreadystatechange=function()
         {
-            document.write(xmlhttp.responseText);
+            if (xmlhttp.readyState==4)
+            {
+                document.write(xmlhttp.responseText);
+            }
+        }
+    } else {
+        xmlhttp.onreadystatechange=function()
+        {
+            if (xmlhttp.readyState==4)
+            {
+                field = document.getElementById("preview_field");
+                field.innerHTML = xmlhttp.responseText;
+            }
         }
     }
     var params = "content="+encodeURIComponent(document.getElementById("content-table").innerHTML);
@@ -102,7 +113,7 @@ function post_all() {
    for (var i = 0; i
diff --git a/program/everything/crawler.py b/program/everything/crawler.py
--- a/program/everything/crawler.py
+++ b/program/everything/crawler.py
= time_wait
 
+    def full_run(self, name):
+        if name not in self.entries:
+            return []
+        results = []
+        entry = self.entries[name]
+        feed = feedparser.parse('http://' + entry['url'])
+        for i in feed.entries:
+            results.append(
+                (i, self.test_entry(name, i['title'], i['summary'])))
+        return results
+
     def run_entry(self, name):
         edict = self.entries[name]
         if 'db' not in edict:
diff --git a/program/everything/index.py b/program/everything/index.py
index 62bb377..146fa67 100644
--- a/program/everything/index.py
+++ b/program/everything/index.py
@@ -73,15 +73,12 @@ def crawler_test(req, args, apok):
     if 'name' not in args or str(args['name']) not in cr.entries:
         req.write('Name not in the crawler...<br>')
     else:
-        args['summary'] = args.get('summary', '')
-        args['title'] = args.get('title', '')
-        respons = cr.test_entry(str(args['name']), str(args['title']),
-                                str(args['summary']))
-        req.write('Title: {}<br>Summary: {}<br>'.format(
-            str(args['title']), str(args['summary'])))
-        req.write('<br>Results:<br>')
-        req.write('<br>'.join('{}: {}'.format(k, v)
-                              for k, v in sorted(respons.iteritems())))
+        data = cr.full_run(args['name'])
+        for entry, dc in data:
+            req.write(u'<br>entry: {title}, {summary}<br>'.
+                      format(**entry).encode('utf-8'))
+            for k, v in dc.iteritems():
+                req.write(u'{}: {}<br>'.format(k, v).encode('utf-8'))
     req.write('<br><br>Go back')
     return apok
 
@@ -98,7 +95,7 @@ def feed2html(url, name):
             'cel-->').format(
             i['title'].encode('ascii', 'xmlcharrefreplace'),
             i['summary'].encode('ascii', 'xmlcharrefreplace'))
-    return result
+    return result + ''
 
 
 def input_data(dictio):
@@ -142,3 +139,8 @@ def crawler_new(req, args, apok):
         'feed_html': feed2html(args['url'], args['name'])}
     req.write(data.format(**params))
     return apok
+
+def crawler_preview(req, args, apok):
+    req.log_error('handler')
+    req.content_type = 'text/html'
+    req.send_http_header()
diff --git a/program/everything/input_app.py b/program/everything/input_app.py
index 107cd69..dc57287 100644
--- a/program/everything/input_app.py
+++ b/program/everything/input_app.py
@@ -14,7 +14,7 @@ def req_pre_pos(req):
     req.content_type = 'text/html'
     req.send_http_header()
     args = util.FieldStorage(req)
-    listing = data_main(args)
+    listing, crawl = data_main(args)
     req.write(
         '\n\n'
         '\tVER: 0.01 - HyperFrontend RSS feed POSTREQUEST'
@@ -152,8 +152,8 @@ def data_main(d, force=False):
         del(d['matchdata'])
     crawl = crawler.Crawler()
     crawl.add_entry(d)
-    status = crawl.write()
-    return crawl.list_names()
+    crawl.write()
+    return crawl.list_names(), crawl
 
 
 def feed2html(req, url, name):
@@ -183,7 +183,13 @@ def handler(req):
         return index.crawler_new(req, util.FieldStorage(req), apache.OK)
     else:
         if req.method == "POST":
-            req_pre_pos(req)
+            if req.uri.split('/')[-1] == 'hyper.py':
+                req_pre_pos(req)
+            elif req.uri.split('/')[-1] == 'preview.py':
+                args = util.FieldStorage(req)
+                listing, crawl = data_main(args)
+                return index.crawler_test(req, {'name': args['name']},
+                                          apache.OK)
         else:
             req.write('Unknown case')
     return apache.OK
diff --git a/program/everything/main.html.t b/program/everything/main.html.t
index b7c571b..87c0fba 100644
--- a/program/everything/main.html.t
+++ b/program/everything/main.html.t
@@ -44,8 +44,6 @@
         {active_crawlers_dropdown}
-        Title:
-        Summary:
diff --git a/program/everything/new.html.t b/program/everything/new.html.t
index 1ecfbbd..7ba7602 100644
--- a/program/everything/new.html.t
+++ b/program/everything/new.html.t
@@ -14,6 +14,10 @@
Loading "{url}" as

{rssname}


{feed_html} - + + +
+

Preview:

+

-- 
2.20.1