diff --git a/fetchmail_s3/README.rst b/fetchmail_s3/README.rst new file mode 100644 index 00000000000..9423da27ae7 --- /dev/null +++ b/fetchmail_s3/README.rst @@ -0,0 +1,127 @@ +.. image:: https://odoo-community.org/readme-banner-image + :target: https://odoo-community.org/get-involved?utm_source=readme + :alt: Odoo Community Association + +============ +Fetchmail S3 +============ + +.. + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! This file is generated by oca-gen-addon-readme !! + !! changes will be overwritten. !! + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + !! source digest: sha256:fc00b1de2ce63f15c742cb4dd93f1d517f8f1f16bd16ceea951bcf470a4811f1 + !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +.. |badge1| image:: https://img.shields.io/badge/maturity-Beta-yellow.png + :target: https://odoo-community.org/page/development-status + :alt: Beta +.. |badge2| image:: https://img.shields.io/badge/license-AGPL--3-blue.png + :target: http://www.gnu.org/licenses/agpl-3.0-standalone.html + :alt: License: AGPL-3 +.. |badge3| image:: https://img.shields.io/badge/github-OCA%2Fserver--tools-lightgray.png?logo=github + :target: https://github.com/OCA/server-tools/tree/18.0/fetchmail_s3 + :alt: OCA/server-tools +.. |badge4| image:: https://img.shields.io/badge/weblate-Translate%20me-F47D42.png + :target: https://translation.odoo-community.org/projects/server-tools-18-0/server-tools-18-0-fetchmail_s3 + :alt: Translate me on Weblate +.. |badge5| image:: https://img.shields.io/badge/runboat-Try%20me-875A7B.png + :target: https://runboat.odoo-community.org/builds?repo=OCA/server-tools&target_branch=18.0 + :alt: Try me on Runboat + +|badge1| |badge2| |badge3| |badge4| |badge5| + +Receive incoming emails from an S3-compatible bucket instead of +IMAP/POP. + +This module adds an "S3 Bucket" server type to Odoo's incoming mail +servers. 
It polls an S3 bucket for raw email files (``.eml``) and +processes them through Odoo's standard mail gateway +(``mail.thread.message_process``). + +**Typical use case**: AWS SES inbound email rules store messages in S3. +This module picks them up on a cron schedule, processes them into Odoo +records (leads, tickets, DMS documents, etc.), then archives or deletes +the S3 objects. + +Works with any S3-compatible storage (AWS S3, MinIO, Hetzner Object +Storage, DigitalOcean Spaces, etc.). + +**Table of contents** + +.. contents:: + :local: + +Configuration +============= + +AWS SES Setup +------------- + +1. Verify your domain in AWS SES (e.g., ``docs.example.com``) +2. Add MX record: + ``docs.example.com MX 10 inbound-smtp.us-east-1.amazonaws.com`` +3. Create an SES receipt rule that stores emails in an S3 bucket +4. Create an IAM user with ``s3:GetObject``, ``s3:ListBucket``, + ``s3:DeleteObject``, ``s3:PutObject`` permissions on the bucket + +Odoo Configuration +------------------ + +1. Go to **Settings → Technical → Incoming Mail Servers** +2. Create a new server with type **S3 Bucket** +3. Fill in: + + - **S3 Bucket Name**: your bucket (e.g., ``my-ses-incoming``) + - **Object Key Prefix**: the prefix SES writes to (e.g., ``emails/``) + - **AWS Region**: the bucket's region (e.g., ``us-east-1``) + - **Access Key ID** and **Secret Access Key**: IAM credentials + - **Endpoint URL**: leave empty for AWS S3, or set for S3-compatible + services + - **Archive Prefix**: where to move processed emails (e.g., + ``processed/``). Leave empty to delete after processing. + +4. Click **Test & Confirm** to verify connectivity +5. Set the **Create a New Record** model (e.g., DMS Directory for + document filing) + +Bug Tracker +=========== + +Bugs are tracked on `GitHub Issues `_. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +`feedback `_. 
+ +Do not contact contributors directly about support or help with technical issues. + +Credits +======= + +Authors +------- + +* Ledo Enterprises + +Contributors +------------ + +- Don Kendall dkendall@ledoweb.com + +Maintainers +----------- + +This module is maintained by the OCA. + +.. image:: https://odoo-community.org/logo.png + :alt: Odoo Community Association + :target: https://odoo-community.org + +OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use. + +This module is part of the `OCA/server-tools `_ project on GitHub. + +You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute. diff --git a/fetchmail_s3/__init__.py b/fetchmail_s3/__init__.py new file mode 100644 index 00000000000..0650744f6bc --- /dev/null +++ b/fetchmail_s3/__init__.py @@ -0,0 +1 @@ +from . import models diff --git a/fetchmail_s3/__manifest__.py b/fetchmail_s3/__manifest__.py new file mode 100644 index 00000000000..6e20d92cb38 --- /dev/null +++ b/fetchmail_s3/__manifest__.py @@ -0,0 +1,20 @@ +# Copyright 2026 Ledo Enterprises +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). + +{ + "name": "Fetchmail S3", + "version": "18.0.1.0.0", + "category": "Hidden/Tools", + "summary": "Receive incoming emails from an S3-compatible bucket", + "author": "Ledo Enterprises, Odoo Community Association (OCA)", + "website": "https://github.com/OCA/server-tools", + "license": "AGPL-3", + "depends": ["fetchmail"], + "external_dependencies": { + "python": ["boto3"], + }, + "data": [ + "views/fetchmail_server_views.xml", + ], + "installable": True, +} diff --git a/fetchmail_s3/models/__init__.py b/fetchmail_s3/models/__init__.py new file mode 100644 index 00000000000..033e76401e1 --- /dev/null +++ b/fetchmail_s3/models/__init__.py @@ -0,0 +1 @@ +from . 
import fetchmail_server diff --git a/fetchmail_s3/models/fetchmail_server.py b/fetchmail_s3/models/fetchmail_server.py new file mode 100644 index 00000000000..6e9cf7986ff --- /dev/null +++ b/fetchmail_s3/models/fetchmail_server.py @@ -0,0 +1,208 @@ +# Copyright 2026 Ledo Enterprises +# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl). + +import logging + +import boto3 +from botocore.exceptions import ClientError + +from odoo import _, api, fields, models +from odoo.exceptions import UserError + +_logger = logging.getLogger(__name__) + + +class FetchmailServer(models.Model): + _inherit = "fetchmail.server" + + server_type = fields.Selection( + selection_add=[("s3", "S3 Bucket (AWS SES / S3-compatible)")], + ondelete={"s3": "set default"}, + ) + s3_bucket = fields.Char("S3 Bucket Name") + s3_prefix = fields.Char( + "Object Key Prefix", + default="emails/", + help="Only process objects under this prefix.", + ) + s3_region = fields.Char("AWS Region", default="us-east-1") + s3_access_key = fields.Char("Access Key ID") + s3_secret_key = fields.Char("Secret Access Key") + s3_endpoint_url = fields.Char( + "Endpoint URL", + help="For S3-compatible services (MinIO, Hetzner, etc.). " + "Leave empty for AWS S3.", + ) + s3_archive_prefix = fields.Char( + "Archive Prefix", + default="processed/", + help="Move processed emails here instead of deleting. " + "Leave empty to delete after processing.", + ) + + def _compute_server_type_info(self): + s3_servers = self.filtered(lambda s: s.server_type == "s3") + s3_servers.server_type_info = _( + "Poll an S3-compatible bucket for raw email files (.eml). " + "Typically used with AWS SES inbound email rules that store " + "messages in S3. Processed emails are archived or deleted." 
+ ) + return super(FetchmailServer, self - s3_servers)._compute_server_type_info() + + @api.onchange("server_type", "is_ssl", "object_id") + def onchange_server_type(self): + if self.server_type == "s3": + self.server = False + self.port = 0 + self.is_ssl = False + return + return super().onchange_server_type() + + def _get_connection_type(self): + self.ensure_one() + if self.server_type == "s3": + return "s3" + return super()._get_connection_type() + + def _get_s3_client(self): + """Create and return a boto3 S3 client.""" + self.ensure_one() + kwargs = {"region_name": self.s3_region or "us-east-1"} + if self.s3_access_key and self.s3_secret_key: + kwargs["aws_access_key_id"] = self.s3_access_key + kwargs["aws_secret_access_key"] = self.s3_secret_key + if self.s3_endpoint_url: + kwargs["endpoint_url"] = self.s3_endpoint_url + return boto3.client("s3", **kwargs) + + def connect(self, allow_archived=False): + self.ensure_one() + if self._get_connection_type() == "s3": + if not allow_archived and not self.active: + raise UserError( + _( + 'The server "%s" cannot be used because it is archived.', + self.display_name, + ) + ) + return self._get_s3_client() + return super().connect(allow_archived=allow_archived) + + def button_confirm_login(self): + s3_servers = self.filtered(lambda s: s._get_connection_type() == "s3") + for server in s3_servers: + try: + client = server._get_s3_client() + client.list_objects_v2( + Bucket=server.s3_bucket, + Prefix=server.s3_prefix or "", + MaxKeys=1, + ) + server.write({"state": "done"}) + except ClientError as e: + raise UserError( + _("S3 connection failed:\n%s", e.response["Error"]["Message"]) + ) from e + except Exception as e: + raise UserError(_("S3 connection test failed:\n%s", str(e))) from e + non_s3 = self - s3_servers + if non_s3: + return super(FetchmailServer, non_s3).button_confirm_login() + return True + + def fetch_mail(self, raise_exception=True): + """Extend fetch_mail to handle S3 server type.""" + s3_servers = 
self.filtered(lambda s: s._get_connection_type() == "s3") + non_s3 = self - s3_servers + for server in s3_servers: + server._fetch_mail_s3(raise_exception=raise_exception) + if non_s3: + return super(FetchmailServer, non_s3).fetch_mail( + raise_exception=raise_exception + ) + return True + + def _fetch_mail_s3(self, raise_exception=True): + """Fetch and process emails from an S3 bucket.""" + self.ensure_one() + _logger.info( + "Start checking for new emails on S3 server %s (bucket: %s, prefix: %s)", + self.name, + self.s3_bucket, + self.s3_prefix, + ) + context = { + "fetchmail_cron_running": True, + "default_fetchmail_server_id": self.id, + } + MailThread = self.env["mail.thread"] + count, failed = 0, 0 + try: + client = self._get_s3_client() + paginator = client.get_paginator("list_objects_v2") + for page in paginator.paginate( + Bucket=self.s3_bucket, Prefix=self.s3_prefix or "" + ): + for obj in page.get("Contents", []): + key = obj["Key"] + if key.endswith("/"): + continue + try: + response = client.get_object(Bucket=self.s3_bucket, Key=key) + raw_email = response["Body"].read() + except ClientError: + _logger.warning( + "Failed to download S3 object %s", key, exc_info=True + ) + failed += 1 + continue + try: + MailThread.with_context(**context).message_process( + self.object_id.model, + raw_email, + save_original=self.original, + strip_attachments=(not self.attach), + ) + except Exception: + _logger.info( + "Failed to process mail from S3 key %s", + key, + exc_info=True, + ) + failed += 1 + self.env.cr.commit() # pylint: disable=invalid-commit + continue + self._s3_handle_processed(client, key) + self.env.cr.commit() # pylint: disable=invalid-commit + count += 1 + _logger.info( + "Fetched %d email(s) on S3 server %s; %d succeeded, %d failed.", + count, + self.name, + count - failed, + failed, + ) + except Exception as e: + if raise_exception: + raise UserError(_("Couldn't fetch emails from S3:\n%s", str(e))) from e + _logger.info( + "General failure fetching 
from S3 server %s.",
+                self.name,
+                exc_info=True,
+            )
+
+    def _s3_handle_processed(self, client, key):
+        """Archive or delete a processed S3 object."""
+        self.ensure_one()
+        try:
+            if self.s3_archive_prefix:
+                filename = key.rsplit("/", 1)[-1]
+                archive_key = f"{self.s3_archive_prefix}{filename}"
+                client.copy_object(
+                    Bucket=self.s3_bucket,
+                    CopySource={"Bucket": self.s3_bucket, "Key": key},
+                    Key=archive_key,
+                )
+            client.delete_object(Bucket=self.s3_bucket, Key=key)
+        except ClientError:
+            _logger.warning("Failed to archive/delete S3 object %s", key, exc_info=True)
diff --git a/fetchmail_s3/pyproject.toml b/fetchmail_s3/pyproject.toml
new file mode 100644
index 00000000000..baa397f2545
--- /dev/null
+++ b/fetchmail_s3/pyproject.toml
@@ -0,0 +1,8 @@
+[project]
+name = "odoo-addon-fetchmail_s3"
+version = "18.0.1.0.0"
+requires-python = ">=3.10"
+
+[build-system]
+requires = ["whool"]
+build-backend = "whool.buildapi"
diff --git a/fetchmail_s3/readme/CONFIGURE.md b/fetchmail_s3/readme/CONFIGURE.md
new file mode 100644
index 00000000000..ebd384461eb
--- /dev/null
+++ b/fetchmail_s3/readme/CONFIGURE.md
@@ -0,0 +1,22 @@
+## AWS SES Setup
+
+1. Verify your domain in AWS SES (e.g., `docs.example.com`)
+2. Add MX record: `docs.example.com MX 10 inbound-smtp.us-east-1.amazonaws.com`
+3. Create an SES receipt rule that stores emails in an S3 bucket
+4. Create an IAM user with `s3:GetObject`, `s3:ListBucket`, `s3:DeleteObject`,
+   `s3:PutObject` permissions on the bucket
+
+## Odoo Configuration
+
+1. Go to **Settings → Technical → Incoming Mail Servers**
+2. Create a new server with type **S3 Bucket**
+3. 
Fill in: + - **S3 Bucket Name**: your bucket (e.g., `my-ses-incoming`) + - **Object Key Prefix**: the prefix SES writes to (e.g., `emails/`) + - **AWS Region**: the bucket's region (e.g., `us-east-1`) + - **Access Key ID** and **Secret Access Key**: IAM credentials + - **Endpoint URL**: leave empty for AWS S3, or set for S3-compatible services + - **Archive Prefix**: where to move processed emails (e.g., `processed/`). + Leave empty to delete after processing. +4. Click **Test & Confirm** to verify connectivity +5. Set the **Create a New Record** model (e.g., DMS Directory for document filing) diff --git a/fetchmail_s3/readme/CONTRIBUTORS.md b/fetchmail_s3/readme/CONTRIBUTORS.md new file mode 100644 index 00000000000..9e17a5fcccb --- /dev/null +++ b/fetchmail_s3/readme/CONTRIBUTORS.md @@ -0,0 +1 @@ +- Don Kendall diff --git a/fetchmail_s3/readme/DESCRIPTION.md b/fetchmail_s3/readme/DESCRIPTION.md new file mode 100644 index 00000000000..b13e6f28895 --- /dev/null +++ b/fetchmail_s3/readme/DESCRIPTION.md @@ -0,0 +1,12 @@ +Receive incoming emails from an S3-compatible bucket instead of IMAP/POP. + +This module adds an "S3 Bucket" server type to Odoo's incoming mail servers. +It polls an S3 bucket for raw email files (`.eml`) and processes them through +Odoo's standard mail gateway (`mail.thread.message_process`). + +**Typical use case**: AWS SES inbound email rules store messages in S3. This +module picks them up on a cron schedule, processes them into Odoo records +(leads, tickets, DMS documents, etc.), then archives or deletes the S3 objects. + +Works with any S3-compatible storage (AWS S3, MinIO, Hetzner Object Storage, +DigitalOcean Spaces, etc.). diff --git a/fetchmail_s3/static/description/index.html b/fetchmail_s3/static/description/index.html new file mode 100644 index 00000000000..02be30436b8 --- /dev/null +++ b/fetchmail_s3/static/description/index.html @@ -0,0 +1,480 @@ + + + + + +README.rst + + + +
+ + + +Odoo Community Association + +
+

Fetchmail S3

+ +

Beta License: AGPL-3 OCA/server-tools Translate me on Weblate Try me on Runboat

+

Receive incoming emails from an S3-compatible bucket instead of +IMAP/POP.

+

This module adds an “S3 Bucket” server type to Odoo’s incoming mail +servers. It polls an S3 bucket for raw email files (.eml) and +processes them through Odoo’s standard mail gateway +(mail.thread.message_process).

+

Typical use case: AWS SES inbound email rules store messages in S3. +This module picks them up on a cron schedule, processes them into Odoo +records (leads, tickets, DMS documents, etc.), then archives or deletes +the S3 objects.

+

Works with any S3-compatible storage (AWS S3, MinIO, Hetzner Object +Storage, DigitalOcean Spaces, etc.).

+

Table of contents

+ +
+

Configuration

+
+

AWS SES Setup

+
    +
  1. Verify your domain in AWS SES (e.g., docs.example.com)
  2. +
  3. Add MX record: +docs.example.com MX 10 inbound-smtp.us-east-1.amazonaws.com
  4. +
  5. Create an SES receipt rule that stores emails in an S3 bucket
  6. +
  7. Create an IAM user with s3:GetObject, s3:ListBucket, +s3:DeleteObject, s3:PutObject permissions on the bucket
  8. +
+
+
+

Odoo Configuration

+
    +
  1. Go to Settings → Technical → Incoming Mail Servers
  2. +
  3. Create a new server with type S3 Bucket
  4. +
  5. Fill in:
      +
    • S3 Bucket Name: your bucket (e.g., my-ses-incoming)
    • +
    • Object Key Prefix: the prefix SES writes to (e.g., emails/)
    • +
    • AWS Region: the bucket’s region (e.g., us-east-1)
    • +
    • Access Key ID and Secret Access Key: IAM credentials
    • +
    • Endpoint URL: leave empty for AWS S3, or set for S3-compatible +services
    • +
    • Archive Prefix: where to move processed emails (e.g., +processed/). Leave empty to delete after processing.
    • +
    +
  6. +
  7. Click Test & Confirm to verify connectivity
  8. +
  9. Set the Create a New Record model (e.g., DMS Directory for +document filing)
  10. +
+
+
+
+

Bug Tracker

+

Bugs are tracked on GitHub Issues. +In case of trouble, please check there if your issue has already been reported. +If you spotted it first, help us to smash it by providing a detailed and welcomed +feedback.

+

Do not contact contributors directly about support or help with technical issues.

+
+
+

Credits

+
+

Authors

+
    +
  • Ledo Enterprises
  • +
+
+
+

Contributors

+ +
+
+

Maintainers

+

This module is maintained by the OCA.

+ +Odoo Community Association + +

OCA, or the Odoo Community Association, is a nonprofit organization whose +mission is to support the collaborative development of Odoo features and +promote its widespread use.

+

This module is part of the OCA/server-tools project on GitHub.

+

You are welcome to contribute. To learn how please visit https://odoo-community.org/page/Contribute.

+
+
+
+
+
+
diff --git a/fetchmail_s3/tests/__init__.py b/fetchmail_s3/tests/__init__.py
new file mode 100644
index 00000000000..b651e27cc59
--- /dev/null
+++ b/fetchmail_s3/tests/__init__.py
@@ -0,0 +1 @@
+from . import test_fetchmail_s3
diff --git a/fetchmail_s3/tests/test_fetchmail_s3.py b/fetchmail_s3/tests/test_fetchmail_s3.py
new file mode 100644
index 00000000000..5a99ee79d1c
--- /dev/null
+++ b/fetchmail_s3/tests/test_fetchmail_s3.py
@@ -0,0 +1,103 @@
+# Copyright 2026 Ledo Enterprises
+# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
+
+from email.message import EmailMessage
+from unittest.mock import MagicMock, patch
+
+from odoo.tests import tagged
+
+from odoo.addons.mail.tests.common import MailCommon
+
+
+@tagged("post_install", "-at_install")
+class TestFetchmailS3(MailCommon):
+    @classmethod
+    def setUpClass(cls):
+        super().setUpClass()
+        cls.server = cls.env["fetchmail.server"].create(
+            {
+                "name": "Test S3 Server",
+                "server_type": "s3",
+                "s3_bucket": "test-bucket",
+                "s3_prefix": "emails/",
+                "s3_region": "us-east-1",
+                "s3_access_key": "AKIATEST",
+                "s3_secret_key": "secret123",
+                "s3_archive_prefix": "processed/",
+                "object_id": cls.env.ref("mail.model_discuss_channel").id,
+            }
+        )
+
+    def _make_raw_email(self, subject="Test Email", body="Hello from S3"):
+        """Build a minimal RFC822 email as bytes."""
+        msg = EmailMessage()
+        msg["From"] = "sender@example.com"
+        msg["To"] = "test@docs.ledoweb.com"
+        msg["Subject"] = subject
+        msg.set_content(body)
+        return msg.as_bytes()
+
+    def _mock_s3_client(self, objects=None):
+        """Return a mocked boto3 S3 client."""
+        client = MagicMock()
+        if objects is None:
+            objects = []
+        paginator = MagicMock()
+        paginator.paginate.return_value = [
+            {"Contents": [{"Key": key} for key in objects]}
+        ]
+        client.get_paginator.return_value = paginator
+        for key in objects:
+            body = MagicMock()
+            body.read.return_value = self._make_raw_email(subject=f"Email: {key}")
+            client.get_object.return_value = {"Body": 
body} + client.list_objects_v2.return_value = {"KeyCount": len(objects)} + return client + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_connection_type(self, mock_boto3): + self.assertEqual(self.server._get_connection_type(), "s3") + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_button_confirm_login(self, mock_boto3): + mock_client = MagicMock() + mock_client.list_objects_v2.return_value = {"KeyCount": 0} + mock_boto3.client.return_value = mock_client + self.server.button_confirm_login() + self.assertEqual(self.server.state, "done") + mock_client.list_objects_v2.assert_called_once() + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_fetch_mail_empty_bucket(self, mock_boto3): + mock_boto3.client.return_value = self._mock_s3_client(objects=[]) + self.server.write({"state": "done"}) + self.server.fetch_mail() + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_fetch_mail_processes_email(self, mock_boto3): + mock_client = self._mock_s3_client(objects=["emails/msg001"]) + mock_boto3.client.return_value = mock_client + self.server.write({"state": "done"}) + self.server.fetch_mail() + # Verify the email was archived (copy + delete) + mock_client.copy_object.assert_called_once() + mock_client.delete_object.assert_called_once() + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_fetch_mail_delete_without_archive(self, mock_boto3): + self.server.write({"s3_archive_prefix": False, "state": "done"}) + mock_client = self._mock_s3_client(objects=["emails/msg002"]) + mock_boto3.client.return_value = mock_client + self.server.fetch_mail() + # No copy, only delete + mock_client.copy_object.assert_not_called() + mock_client.delete_object.assert_called_once() + + @patch("odoo.addons.fetchmail_s3.models.fetchmail_server.boto3") + def test_skips_directory_keys(self, mock_boto3): + mock_client = self._mock_s3_client(objects=["emails/", 
"emails/msg003"]) + mock_boto3.client.return_value = mock_client + self.server.write({"state": "done"}) + self.server.fetch_mail() + # Only msg003 should be fetched, not the "directory" key + mock_client.get_object.assert_called_once() diff --git a/fetchmail_s3/views/fetchmail_server_views.xml b/fetchmail_s3/views/fetchmail_server_views.xml new file mode 100644 index 00000000000..f045836916e --- /dev/null +++ b/fetchmail_s3/views/fetchmail_server_views.xml @@ -0,0 +1,50 @@ + + + + + fetchmail.server.form.s3 + fetchmail.server + + + + + + + + + + + + + + server_type == 's3' + + + server_type == 's3' + + + server_type == 's3' + + + server_type == 's3' + + + server_type == 's3' + + + +