diff --git a/var/www/blueprints/api_rest.py b/var/www/blueprints/api_rest.py
index 8657ce3f..2c43067d 100644
--- a/var/www/blueprints/api_rest.py
+++ b/var/www/blueprints/api_rest.py
@@ -164,6 +164,74 @@ def lacus_cookiejar_import():
     return Response(json.dumps(res[0]), mimetype='application/json'), res[1]
 
 
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+# # # # # # # # # # # # CRAWLER SCHEDULER / STATS # # # # # # # # # # # # # #
+# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
+
+@api_rest.route("api/v1/crawler/scheduler", methods=['GET'])
+@token_required('user')
+def get_crawler_scheduler():
+    """Return metadata for all crawler schedules (tags coerced to JSON-safe lists)."""
+    res = crawlers.get_schedulers_metas()
+    # Ensure tags sets are JSON-serializable
+    for meta in res:
+        if isinstance(meta.get('tags'), set):
+            meta['tags'] = list(meta['tags'])
+    return create_json_response(res, 200)
+
+@api_rest.route("api/v1/crawler/schedule/<schedule_uuid>", methods=['DELETE'])
+@token_required('admin')
+def delete_crawler_schedule(schedule_uuid):
+    """Delete the crawler schedule identified by the schedule_uuid URL segment."""
+    data = {'uuid': schedule_uuid}
+    res = crawlers.api_delete_schedule(data)
+    # Ensure error payloads are JSON-serializable (uuid may be a CrawlerSchedule object)
+    if isinstance(res[0], dict):
+        res_dict = {k: str(v) if not isinstance(v, (str, int, float, bool, list, dict, type(None))) else v
+                    for k, v in res[0].items()}
+    else:
+        res_dict = res[0]
+    return create_json_response(res_dict, res[1])
+
+@api_rest.route("api/v1/crawler/captures/status", methods=['GET'])
+@token_required('user')
+def get_crawler_captures_status():
+    """Return the status of ongoing crawler captures."""
+    res = crawlers.get_captures_status()
+    return create_json_response(res, 200)
+
+@api_rest.route("api/v1/crawler/stats", methods=['GET'])
+@token_required('user')
+def get_crawler_stats():
+    """Return crawler statistics, optionally filtered by the domain_type query arg."""
+    domain_type = request.args.get('domain_type')
+    res = crawlers.get_crawlers_stats(domain_type=domain_type)
+    return create_json_response(res, 200)
+
+@api_rest.route("api/v1/crawler/blocklist", methods=['GET'])
+@token_required('admin')
+def get_crawler_blocklist():
+    """Return the crawler domain blocklist as a JSON list."""
+    res = crawlers.get_blacklist()
+    return create_json_response(list(res), 200)
+
+@api_rest.route("api/v1/crawler/blocklist", methods=['POST'])
+@token_required('admin')
+def add_crawler_blocklist():
+    """Add a domain (taken from the JSON request body) to the crawler blocklist."""
+    data = request.get_json()
+    res = crawlers.api_blacklist_domain(data)
+    return create_json_response(res[0], res[1])
+
+@api_rest.route("api/v1/crawler/blocklist/<domain>", methods=['DELETE'])
+@token_required('admin')
+def delete_crawler_blocklist(domain):
+    """Remove the given domain (URL segment) from the crawler blocklist."""
+    data = {'domain': domain}
+    res = crawlers.api_unblacklist_domain(data)
+    return create_json_response(res[0], res[1])
+
+
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
 # # # # # # # # # # # # # # # # # # IMPORTERS # # # # # # # # # # # # # # # # # # #
 # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #