Fix key error + log exceptions to error.log

relikd committed 2020-09-05 23:30:37 +02:00
parent 6f215595b6
commit 724d7ab319
4 changed files with 38 additions and 26 deletions

File 1 of 4

@@ -55,7 +55,7 @@ function make_output($msg, $url=null, $when=null, $key=null) {
 function response_success($bundle_id, $key) {
     $url = $bundle_id ? 'https://appchk.de/app/'.$bundle_id.'/index.html' : null;
     # next update will be in ... X seconds (up to 1 min)
-    make_output('ok', $url, ceil(time()/120)*120 - time(), $key);
+    make_output('ok', $url, ceil(time()/60)*60 - time(), $key);
 }
 function response_fail($error) {
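
Side note on this hunk: ceil(time()/60)*60 - time() is the number of seconds until the next full minute, which matches the comment "up to 1 min"; the old ceil(time()/120)*120 rounded up to the next two-minute boundary instead. A quick Python stand-in for the same arithmetic (illustrative only, not part of the commit):

# Python stand-in for the PHP expression above (illustrative only)
import math, time
now = int(time.time())
wait = math.ceil(now / 60) * 60 - now  # 0..59 seconds until the next full minute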

File 2 of 4

@@ -34,14 +34,14 @@ def dict_increment(ddic, key, num):
 def json_combine(bundle_id):
-    res = dict()
+    res = dict({'#rec': 0, '#logs': 0})
     domA = dict() # unique sub domains
     domB = dict() # total sub domains
     domC = dict() # unique parent domains
     domD = dict() # total parent domains
     for fname, jdata in mylib.enum_jsons(bundle_id):
         res['name'] = jdata['app-name']
-        dict_increment(res, '#rec', 1)
+        res['#rec'] += 1
         dict_increment(res, 'rec-total', jdata['duration'])
         try:
             logs = jdata['logs']
@@ -49,7 +49,7 @@ def json_combine(bundle_id):
         for subdomain in logs:
             occurs = len(logs[subdomain])
             sub_tracker = tracker.is_tracker(subdomain)
-            dict_increment(res, '#logs', occurs)
+            res['#logs'] += 1
             dict_increment(domA, subdomain, 1)
             dict_increment(domB, subdomain, occurs)
             par_dom = get_parent_domain(subdomain)
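
These two hunks are the "key error" part of the commit message: res['#rec'] += 1 reads the key before writing it, so on a plain empty dict it raises a KeyError. Pre-seeding the counters in the dict() constructor makes the in-place increments safe. A minimal sketch of the failure mode (names are illustrative, not from the repo):

# Minimal sketch of the KeyError these hunks avoid
res = dict()
# res['#rec'] += 1               # would raise KeyError: '#rec'
res = dict({'#rec': 0, '#logs': 0})
res['#rec'] += 1                 # fine, the counter was pre-seeded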

File 3 of 4

@@ -6,6 +6,7 @@ import glob
 import json
 import shutil
 import logging
+import traceback
 from pathlib import Path
 import urllib.request as curl
@@ -77,6 +78,8 @@ def valid_bundle_id(bundle_id):
 def err(scope, msg, logOnly=False):
+    if isinstance(msg, Exception):
+        msg = traceback.format_exc()
     logger.error('[{}] {}'.format(scope, msg))
     if not logOnly:
         print(' [ERROR] ' + msg)
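
traceback.format_exc() returns the formatted stack trace of the exception currently being handled, so err() now writes a full traceback to the log instead of only the exception's message. It only has something to report when called from inside an except block, which is how the main script below uses it. A small self-contained sketch of that behaviour (function name is made up for the example):

# Sketch: format_exc() picks up the exception that is currently being handled
import traceback

def log_error(scope, msg):
    if isinstance(msg, Exception):
        msg = traceback.format_exc()  # multi-line traceback string
    print('[{}] {}'.format(scope, msg))

try:
    {}['missing']                     # raises KeyError
except Exception as e:
    log_error('critical', e)          # prints the full KeyError traceback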

File 4 of 4

@@ -24,14 +24,20 @@ def print_usage_and_exit():
 def del_id(bundle_ids):
+    print('removing apps from website:')
     if bundle_ids == ['*']:
         bundle_ids = list(mylib.enum_appids())
+    update_index = False
     for bid in bundle_ids:
         dest = mylib.path_out_app(bid)
         if mylib.dir_exists(dest):
+            print(' ' + bid)
             mylib.rm(dest)
-            html_index.process()
+            update_index = True
+    print('')
+    if update_index:
+        html_index.process()
 
 def combine_and_update(bundle_ids, where=None, forceGraphs=False):
@@ -70,25 +76,28 @@ def tracker_update():
     combine_and_update(['*'], where=new_trackers)
 
-if __name__ == '__main__':
-    args = sys.argv[1:]
-    if len(args) == 0:
-        print_usage_and_exit()
-    cmd = args[0]
-    params = args[1:]
-    if cmd == 'import':
-        import_update()
-    elif cmd == 'del':
-        if len(params) == 0:
-            print_usage_and_exit()
-        del_id(params) # ['_manually']
-    elif cmd == 'run':
-        if len(params) == 0:
-            print_usage_and_exit()
-        combine_and_update(params) # ['*'], where=['test.com']
-    elif cmd == 'icons':
-        if bundle_download.download_missing_icons(force=False):
-            html_index.process()
-    elif cmd == 'tracker':
-        tracker_update()
-        # tracker_download.combine_all('x')
+try:
+    if __name__ == '__main__':
+        args = sys.argv[1:]
+        if len(args) == 0:
+            print_usage_and_exit()
+        cmd = args[0]
+        params = args[1:]
+        if cmd == 'import':
+            import_update()
+        elif cmd == 'del':
+            if len(params) == 0:
+                print_usage_and_exit()
+            del_id(params) # ['_manually']
+        elif cmd == 'run':
+            if len(params) == 0:
+                print_usage_and_exit()
+            combine_and_update(params) # ['*'], where=['test.com']
+        elif cmd == 'icons':
+            if bundle_download.download_missing_icons(force=False):
+                html_index.process()
+        elif cmd == 'tracker':
+            tracker_update()
+            # tracker_download.combine_all('x')
+except Exception as e:
+    mylib.err('critical', e)
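
The handler that actually writes these messages to error.log is not part of this diff; only logger.error(...) is visible in mylib. A minimal sketch of how such a logger could be wired up, with the file name taken from the commit title and everything else assumed:

# Assumed wiring, not shown in this commit: route logger.error(...) to error.log
import logging

logger = logging.getLogger('appchk')        # logger name is an assumption
handler = logging.FileHandler('error.log')  # file name from the commit title
handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.ERROR)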