Skip to content

Commit 50c9822

Browse files
author
laurin
committed
Removed S3 client code duplication for R2 storage; also removed LibraryTrigger
1 parent 0e294cc commit 50c9822

4 files changed

Lines changed: 59 additions & 147 deletions

File tree

sebs/cloudflare/cloudflare.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -514,11 +514,7 @@ def cached_function(self, function: Function):
514514
Args:
515515
function: The cached function
516516
"""
517-
from sebs.cloudflare.triggers import LibraryTrigger, HTTPTrigger
518-
519-
for trigger in function.triggers(Trigger.TriggerType.LIBRARY):
520-
trigger.logging_handlers = self.logging_handlers
521-
cast(LibraryTrigger, trigger).deployment_client = self
517+
from sebs.cloudflare.triggers import HTTPTrigger
522518

523519
for trigger in function.triggers(Trigger.TriggerType.HTTP):
524520
trigger.logging_handlers = self.logging_handlers
@@ -779,15 +775,11 @@ def create_trigger(
779775
Returns:
780776
The created trigger
781777
"""
782-
from sebs.cloudflare.triggers import LibraryTrigger, HTTPTrigger
778+
from sebs.cloudflare.triggers import HTTPTrigger
783779

784780
worker = cast(CloudflareWorker, function)
785781

786-
if trigger_type == Trigger.TriggerType.LIBRARY:
787-
trigger = LibraryTrigger(worker.name, self)
788-
trigger.logging_handlers = self.logging_handlers
789-
return trigger
790-
elif trigger_type == Trigger.TriggerType.HTTP:
782+
if trigger_type == Trigger.TriggerType.HTTP:
791783
account_id = worker.account_id or self.config.credentials.account_id
792784
worker_url = self._build_workers_dev_url(worker.name, account_id)
793785
trigger = HTTPTrigger(worker.name, worker_url)

sebs/cloudflare/function.py

Lines changed: 2 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ def serialize(self) -> dict:
4040
@staticmethod
4141
def deserialize(cached_config: dict) -> "CloudflareWorker":
4242
from sebs.faas.function import Trigger
43-
from sebs.cloudflare.triggers import LibraryTrigger, HTTPTrigger
43+
from sebs.cloudflare.triggers import HTTPTrigger
4444

4545
cfg = FunctionConfig.deserialize(cached_config["config"])
4646
ret = CloudflareWorker(
@@ -54,14 +54,7 @@ def deserialize(cached_config: dict) -> "CloudflareWorker":
5454
)
5555

5656
for trigger in cached_config["triggers"]:
57-
mapping = {
58-
LibraryTrigger.typename(): LibraryTrigger,
59-
HTTPTrigger.typename(): HTTPTrigger
60-
}
61-
trigger_type = cast(
62-
Trigger,
63-
mapping.get(trigger["type"]),
64-
)
57+
trigger_type = HTTPTrigger if trigger["type"] == HTTPTrigger.typename() else None
6558
assert trigger_type, "Unknown trigger type {}".format(trigger["type"])
6659
ret.add_trigger(trigger_type.deserialize(trigger))
6760

sebs/cloudflare/r2.py

Lines changed: 54 additions & 74 deletions
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,7 @@ def __init__(
3535
):
3636
super().__init__(region, cache_client, resources, replace_existing)
3737
self._credentials = credentials
38+
self._s3_client = None
3839

3940
def _get_auth_headers(self) -> dict[str, str]:
4041
"""Get authentication headers for Cloudflare API requests."""
@@ -52,6 +53,46 @@ def _get_auth_headers(self) -> dict[str, str]:
5253
else:
5354
raise RuntimeError("Invalid Cloudflare credentials configuration")
5455

56+
def _get_s3_client(self):
57+
"""
58+
Get or initialize the S3-compatible client for R2 operations.
59+
60+
:return: boto3 S3 client or None if credentials not available
61+
"""
62+
if self._s3_client is not None:
63+
return self._s3_client
64+
65+
# Check if we have S3-compatible credentials
66+
if not self._credentials.r2_access_key_id or not self._credentials.r2_secret_access_key:
67+
self.logging.warning(
68+
"R2 S3-compatible API credentials not configured. "
69+
"Set CLOUDFLARE_R2_ACCESS_KEY_ID and CLOUDFLARE_R2_SECRET_ACCESS_KEY environment variables."
70+
)
71+
return None
72+
73+
try:
74+
import boto3
75+
from botocore.config import Config
76+
77+
account_id = self._credentials.account_id
78+
79+
self._s3_client = boto3.client(
80+
's3',
81+
endpoint_url=f'https://{account_id}.r2.cloudflarestorage.com',
82+
aws_access_key_id=self._credentials.r2_access_key_id,
83+
aws_secret_access_key=self._credentials.r2_secret_access_key,
84+
config=Config(signature_version='s3v4'),
85+
region_name='auto'
86+
)
87+
88+
return self._s3_client
89+
90+
except ImportError:
91+
self.logging.warning(
92+
"boto3 not available. Install with: pip install boto3"
93+
)
94+
return None
95+
5596
def correct_name(self, name: str) -> str:
5697
return name
5798

@@ -142,33 +183,12 @@ def upload(self, bucket_name: str, filepath: str, key: str):
142183
:param filepath: local source filepath
143184
:param key: R2 destination key/path
144185
"""
186+
s3_client = self._get_s3_client()
187+
if s3_client is None:
188+
self.logging.warning(f"Cannot upload {filepath} to R2 - S3 client not available")
189+
return
190+
145191
try:
146-
import boto3
147-
from botocore.config import Config
148-
149-
account_id = self._credentials.account_id
150-
151-
# R2 uses S3-compatible API, but requires special configuration
152-
# The endpoint is: https://<account_id>.r2.cloudflarestorage.com
153-
# You need to create R2 API tokens in the Cloudflare dashboard
154-
155-
# Check if we have S3-compatible credentials
156-
if not self._credentials.r2_access_key_id or not self._credentials.r2_secret_access_key:
157-
self.logging.warning(
158-
"R2 upload requires S3-compatible API credentials (r2_access_key_id, r2_secret_access_key). "
159-
"File upload skipped. Set CLOUDFLARE_R2_ACCESS_KEY_ID and CLOUDFLARE_R2_SECRET_ACCESS_KEY."
160-
)
161-
return
162-
163-
s3_client = boto3.client(
164-
's3',
165-
endpoint_url=f'https://{account_id}.r2.cloudflarestorage.com',
166-
aws_access_key_id=self._credentials.r2_access_key_id,
167-
aws_secret_access_key=self._credentials.r2_secret_access_key,
168-
config=Config(signature_version='s3v4'),
169-
region_name='auto'
170-
)
171-
172192
with open(filepath, 'rb') as f:
173193
s3_client.put_object(
174194
Bucket=bucket_name,
@@ -178,11 +198,6 @@ def upload(self, bucket_name: str, filepath: str, key: str):
178198

179199
self.logging.debug(f"Uploaded {filepath} to R2 bucket {bucket_name} as {key}")
180200

181-
except ImportError:
182-
self.logging.warning(
183-
"boto3 not available. Install with: pip install boto3. "
184-
"File upload to R2 skipped."
185-
)
186201
except Exception as e:
187202
self.logging.warning(f"Failed to upload {filepath} to R2: {e}")
188203

@@ -194,28 +209,12 @@ def upload_bytes(self, bucket_name: str, key: str, data: bytes):
194209
:param key: R2 destination key/path
195210
:param data: bytes to upload
196211
"""
212+
s3_client = self._get_s3_client()
213+
if s3_client is None:
214+
self.logging.warning(f"Cannot upload bytes to R2 - S3 client not available")
215+
return
216+
197217
try:
198-
import boto3
199-
from botocore.config import Config
200-
201-
account_id = self._credentials.account_id
202-
203-
if not self._credentials.r2_access_key_id or not self._credentials.r2_secret_access_key:
204-
self.logging.warning(
205-
"R2 upload requires S3-compatible API credentials (r2_access_key_id, r2_secret_access_key). "
206-
"Upload skipped. Set CLOUDFLARE_R2_ACCESS_KEY_ID and CLOUDFLARE_R2_SECRET_ACCESS_KEY environment variables."
207-
)
208-
return
209-
210-
s3_client = boto3.client(
211-
's3',
212-
endpoint_url=f'https://{account_id}.r2.cloudflarestorage.com',
213-
aws_access_key_id=self._credentials.r2_access_key_id,
214-
aws_secret_access_key=self._credentials.r2_secret_access_key,
215-
config=Config(signature_version='s3v4'),
216-
region_name='auto'
217-
)
218-
219218
s3_client.put_object(
220219
Bucket=bucket_name,
221220
Key=key,
@@ -224,10 +223,6 @@ def upload_bytes(self, bucket_name: str, key: str, data: bytes):
224223

225224
self.logging.debug(f"Uploaded {len(data)} bytes to R2 bucket {bucket_name} as {key}")
226225

227-
except ImportError:
228-
self.logging.warning(
229-
"boto3 not available. Install with: pip install boto3"
230-
)
231226
except Exception as e:
232227
self.logging.warning(f"Failed to upload bytes to R2: {e}")
233228

@@ -246,27 +241,12 @@ def list_bucket(self, bucket_name: str, prefix: str = "") -> List[str]:
246241
:param prefix: optional prefix filter
247242
:return: list of files in a given bucket
248243
"""
249-
# Use S3-compatible API with R2 credentials
250-
if not self._credentials.r2_access_key_id or not self._credentials.r2_secret_access_key:
251-
self.logging.warning(f"R2 S3 credentials not configured, cannot list bucket {bucket_name}")
244+
s3_client = self._get_s3_client()
245+
if s3_client is None:
246+
self.logging.warning(f"Cannot list R2 bucket {bucket_name} - S3 client not available")
252247
return []
253248

254249
try:
255-
import boto3
256-
from botocore.config import Config
257-
258-
account_id = self._credentials.account_id
259-
r2_endpoint = f"https://{account_id}.r2.cloudflarestorage.com"
260-
261-
s3_client = boto3.client(
262-
's3',
263-
endpoint_url=r2_endpoint,
264-
aws_access_key_id=self._credentials.r2_access_key_id,
265-
aws_secret_access_key=self._credentials.r2_secret_access_key,
266-
config=Config(signature_version='s3v4'),
267-
region_name='auto'
268-
)
269-
270250
# List objects with optional prefix
271251
paginator = s3_client.get_paginator('list_objects_v2')
272252
page_iterator = paginator.paginate(Bucket=bucket_name, Prefix=prefix)

sebs/cloudflare/triggers.py

Lines changed: 0 additions & 53 deletions
Original file line numberDiff line numberDiff line change
@@ -4,59 +4,6 @@
44
from sebs.faas.function import Trigger, ExecutionResult
55

66

7-
class LibraryTrigger(Trigger):
8-
"""
9-
Library trigger for Cloudflare Workers.
10-
Allows invoking workers programmatically via the Cloudflare API.
11-
"""
12-
13-
def __init__(self, worker_name: str, deployment_client=None):
14-
super().__init__()
15-
self.worker_name = worker_name
16-
self.deployment_client = deployment_client
17-
18-
@staticmethod
19-
def typename() -> str:
20-
return "Cloudflare.LibraryTrigger"
21-
22-
@staticmethod
23-
def trigger_type() -> Trigger.TriggerType:
24-
return Trigger.TriggerType.LIBRARY
25-
26-
def sync_invoke(self, payload: dict) -> ExecutionResult:
27-
"""
28-
Synchronously invoke a Cloudflare Worker.
29-
30-
Args:
31-
payload: The payload to send to the worker
32-
33-
Returns:
34-
ExecutionResult with performance metrics
35-
"""
36-
# This will be implemented when we have the deployment client
37-
raise NotImplementedError("Cloudflare Worker invocation not yet implemented")
38-
39-
def async_invoke(self, payload: dict) -> concurrent.futures.Future:
40-
"""
41-
Asynchronously invoke a Cloudflare Worker.
42-
Not typically supported for Cloudflare Workers.
43-
"""
44-
raise NotImplementedError("Cloudflare Workers do not support async invocation")
45-
46-
def serialize(self) -> dict:
47-
"""Serialize the LibraryTrigger."""
48-
return {
49-
"type": self.typename(),
50-
"worker_name": self.worker_name,
51-
}
52-
53-
@staticmethod
54-
def deserialize(cached_config: dict) -> "LibraryTrigger":
55-
"""Deserialize a LibraryTrigger from cached config."""
56-
from sebs.cloudflare.triggers import LibraryTrigger
57-
return LibraryTrigger(cached_config["worker_name"])
58-
59-
607
class HTTPTrigger(Trigger):
618
"""
629
HTTP trigger for Cloudflare Workers.

0 commit comments

Comments
 (0)