From 3dca2ce426fdf0b6d1d37a2513805fdde7b3b3a9 Mon Sep 17 00:00:00 2001
From: Mart Lubbers
Date: Wed, 24 Sep 2014 19:32:12 +0200
Subject: [PATCH] update

---
 program/everything/crawler.db  | 21 ++++++++---------
 program/everything/crawler.py  | 11 +++++++--
 program/everything/index.py    | 42 +++++++++++++++++++++++++++++++---
 program/everything/main.html.t | 19 +++++++++++++--
 4 files changed, 74 insertions(+), 19 deletions(-)

diff --git a/program/everything/crawler.db b/program/everything/crawler.db
index e9287f8..380729a 100644
--- a/program/everything/crawler.db
+++ b/program/everything/crawler.db
@@ -1,10 +1,10 @@
 (dp0
-S'Paradiso_test1'
+S'test'
 p1
 (dp2
 S'website'
 p3
-S'www.test.nl'
+S't'
 p4
 sS'name'
 p5
@@ -15,11 +15,11 @@ S'localhost/py/paradiso.rss.xml'
 p7
 sS'dloc'
 p8
-S'test'
+S'ut'
 p9
 sS'venue'
 p10
-S'p'
+S'Para'
 p11
 sS'content'
 p12
@@ -44,7 +44,7 @@ p23
 aS'mede-oprichter Broken Social Scene solo'
 p24
 aa(lp25
-S'vrijdag 4 juli 2014 22:00 - Palenke Soultribe'
+S'vrijdag 4 juli 2014 22:00 - Palenke Soultribe'
 p26
 aS'Electronische muziek en Afro-Colombiaanse ritmes'
 p27
@@ -86,14 +86,11 @@ p47
 (lp48
 S'\x01 \x02 - \x03 - Locatie: \x04'
 p49
-aS'\x01 \x02 - \x03'
-p50
 asS'freq'
-p51
+p50
 S'1w'
-p52
+p51
 sS'adress'
-p53
-S'adres'
-p54
+p52
+g4
 ss.
\ No newline at end of file
diff --git a/program/everything/crawler.py b/program/everything/crawler.py
index a377d5b..52c3f2c 100644
--- a/program/everything/crawler.py
+++ b/program/everything/crawler.py
@@ -54,22 +54,28 @@ class Crawler():
         self.entries[d['name']] = d
 
     def write(self, path='/var/www/py/crawler.db'):
+        status = ''
         entries2 = {kk: {k: v for k, v in vv.iteritems()
                          if k not in ['summarydawg_t', 'titledawg_t']}
                     for kk, vv in self.entries.iteritems()}
         if os.path.exists(path):
             os.rename(path, '{}.bak'.format(path))
+            status += 'Old crawler file found, created backup<br>'
         try:
             with open(path, 'wb') as f:
                 f.write(pickle.dumps(entries2))
+            status += 'Crawler written successfully<br>'
         except Exception, e:
             # print 'something went wrong writing: {}'.format(e)
             # print 'restoring backup'
-            raise e
+            status += 'Something went wrong: {}<br>'.format(e)
             os.rename('{}.bak'.format(path), path)
+            status += 'Writing failed, restored backup<br>'
         finally:
             if os.path.exists('{}.bak'.format(path)):
                 os.remove('{}.bak'.format(path))
+                status += 'Backup file removed<br>'
+        return status
 
     def get_regex(self, name):
         d_t = self.entries[name]['titledawg_t']
@@ -162,7 +168,8 @@ class Crawler():
         if matchs:
             matches = sorted(matchs, key=lambda x: len(x.groups()))
             results['summary'] = list(reversed(matches))[0].groupdict()
-        return dict(results['summary'].items() + results['title'].items())
+        outputdct = dict(results['summary'].items() + results['title'].items())
+        return {re.sub('\d', '', k): v for k, v in outputdct.iteritems()}
 
     def has_to_run(self, interval, last_run, now):
         time_wait = sum(
diff --git a/program/everything/index.py b/program/everything/index.py
index e331645..5143402 100644
--- a/program/everything/index.py
+++ b/program/everything/index.py
@@ -13,9 +13,6 @@ def index(req, args, apok):
     cr = crawler.Crawler('/var/www/py/crawler.db')
     ns = cr.list_names()
     params = {
-        'active_crawlers':
-            '\n'.join('{0}<br>'.
-                      format(a) for a in ns),
         'active_crawlers_dropdown':
             '\n'.join('<option value="{0}">{0}</option>'.format(a) for a in ns)
     }
@@ -24,8 +21,47 @@
 
 
 def crawler_edit(req, args, apok):
+    args['name'] = args.get('name', '')
+    req.log_error('handler')
+    req.content_type = 'text/html'
+    req.send_http_header()
+    if args['action'] == 'remove':
+        req.write('Remove {}<br>'.format(args['name']))
+        cr = crawler.Crawler()
+        status = ''
+        try:
+            del(cr.entries[args['name']])
+            status = 'Success...<br>'
+            status += cr.write()
+        except KeyError:
+            status = 'Name not in the crawler<br>'
+        except Exception, e:
+            status = 'Other exception thrown: {}<br>'.format(e)
+        req.write(status)
+    elif args['action'] == 'edit':
+        req.write('Edit {}\n'.format(args['name']))
+    else:
+        req.write('Unknown editing action: {}'.format(args['action']))
+    req.write('<a href="index.py">Go back</a>')
     return apok
 
 
 def crawler_test(req, args, apok):
+    req.log_error('handler')
+    req.content_type = 'text/html'
+    req.send_http_header()
+    cr = crawler.Crawler(init=True)
+    if 'name' not in args or str(args['name']) not in cr.entries:
+        req.write('Name not in the crawler...<br>')
+    else:
+        args['summary'] = args.get('summary', '')
+        args['title'] = args.get('title', '')
+        respons = cr.test_entry(str(args['name']), str(args['title']),
+                                str(args['summary']))
+        req.write('Title: {}<br>Summary: {}<br>'.format(
+            str(args['title']), str(args['summary'])))
+        req.write('<br>Results:<br>')
+        req.write('<br>'.join('{}: {}'.format(k, v)
+                              for k, v in sorted(respons.iteritems())))
+        req.write('<br><br><a href="index.py">Go back</a>')
     return apok
diff --git a/program/everything/main.html.t b/program/everything/main.html.t
index 4a7be42..6606818 100644
--- a/program/everything/main.html.t
+++ b/program/everything/main.html.t
@@ -5,12 +5,27 @@
         <tr>
-            <th>Inspect/edit crawler</th>
+            <th>Edit/Remove crawler</th>
             <th>Add new crawler</th>
             <th>Test crawler</th>
         </tr>
         <tr>
             <td>
-                {active_crawlers}
+                <form action="index.py/crawler_edit" method="get">
+                    <select name="name">
+                        {active_crawlers_dropdown}
+                    </select>
+                    <input type="submit" name="action" value="edit"/>
+                    <input type="submit" name="action" value="remove"/>
+                </form>
             </td>
             <td></td>
             <td>
+                <form action="index.py/crawler_test" method="get">
+                    <select name="name">
+                        {active_crawlers_dropdown}
+                    </select>
+                    <br/>
+                    <input type="text" name="title"/>
+                    <input type="text" name="summary"/>
+                    <input type="submit" value="test"/>
+                </form>
             </td>
-- 
2.20.1