Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
61 changes: 61 additions & 0 deletions var/www/blueprints/api_rest.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,67 @@ def lacus_cookiejar_import():
return Response(json.dumps(res[0]), mimetype='application/json'), res[1]


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # CRAWLER SCHEDULER / STATS # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #

@api_rest.route("api/v1/crawler/scheduler", methods=['GET'])
@token_required('user')
def get_crawler_scheduler():
    """Return metadata for every crawler schedule.

    ``crawlers.get_schedulers_metas`` may store 'tags' as a set, which
    json can't serialize; convert those to lists before responding.
    """
    metas = crawlers.get_schedulers_metas()
    for schedule_meta in metas:
        tags = schedule_meta.get('tags')
        if isinstance(tags, set):
            schedule_meta['tags'] = list(tags)
    return create_json_response(metas, 200)

@api_rest.route("api/v1/crawler/schedule/<path:schedule_uuid>", methods=['DELETE'])
@token_required('admin')
def delete_crawler_schedule(schedule_uuid):
    """Delete a crawler schedule by UUID (admin only).

    The payload from ``crawlers.api_delete_schedule`` may contain
    non-JSON-serializable values (e.g. the 'uuid' key may hold a
    CrawlerSchedule object). The original shallow guard missed values
    nested inside lists/dicts; sanitize recursively so json encoding
    can never fail on the error payload.
    """
    def _jsonable(value):
        # Recursively coerce anything the json encoder can't handle into str.
        if isinstance(value, dict):
            return {k: _jsonable(v) for k, v in value.items()}
        if isinstance(value, list):
            return [_jsonable(v) for v in value]
        if isinstance(value, (str, int, float, bool, type(None))):
            return value
        return str(value)

    res = crawlers.api_delete_schedule({'uuid': schedule_uuid})
    return create_json_response(_jsonable(res[0]), res[1])

@api_rest.route("api/v1/crawler/captures/status", methods=['GET'])
@token_required('user')
def get_crawler_captures_status():
    """Return the current status of crawler captures."""
    return create_json_response(crawlers.get_captures_status(), 200)

@api_rest.route("api/v1/crawler/stats", methods=['GET'])
@token_required('user')
def get_crawler_stats():
    """Return crawler statistics, optionally filtered by ?domain_type=."""
    requested_type = request.args.get('domain_type')
    stats = crawlers.get_crawlers_stats(domain_type=requested_type)
    return create_json_response(stats, 200)

@api_rest.route("api/v1/crawler/blocklist", methods=['GET'])
@token_required('admin')
def get_crawler_blocklist():
    """Return the crawler domain blocklist as a JSON array (admin only)."""
    blocked = crawlers.get_blacklist()
    return create_json_response(list(blocked), 200)

@api_rest.route("api/v1/crawler/blocklist", methods=['POST'])
@token_required('admin')
def add_crawler_blocklist():
    """Add a domain to the crawler blocklist (admin only).

    Expects a JSON body; it is forwarded as-is to
    ``crawlers.api_blacklist_domain`` which validates it.
    """
    payload = request.get_json()
    result, status = crawlers.api_blacklist_domain(payload)
    return create_json_response(result, status)

@api_rest.route("api/v1/crawler/blocklist/<path:domain>", methods=['DELETE'])
@token_required('admin')
def delete_crawler_blocklist(domain):
    """Remove a domain from the crawler blocklist (admin only)."""
    result, status = crawlers.api_unblacklist_domain({'domain': domain})
    return create_json_response(result, status)


# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # IMPORTERS # # # # # # # # # # # # # # # # #
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
Expand Down