Bank Services tested and completed

parent 2c5f00ab1d
commit 35aab0ba11
@ -0,0 +1,27 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc \
    && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /BankServices/EmailService/pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root --only main \
    && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /BankServices/EmailService /BankServices/EmailService
COPY /BankServices/EmailService /
COPY /Controllers /Controllers
COPY /BankServices/ServiceDepends/config.py /BankServices/ServiceDepends/config.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application via Poetry
CMD ["poetry", "run", "python", "app.py"]
@ -0,0 +1,84 @@
# Email Service

## Overview
The Email Service is the first component in a Redis pub/sub processing chain for bank-related email automation. It monitors a specified mailbox for emails with attachments, filters them based on IBAN criteria, and publishes the data to a Redis channel for further processing.

## Features

### Email Processing
- Connects to a configured mailbox using IMAP
- Implements smart date-based filtering:
  - Checks emails from the past 90 days on the first run of each day
  - Checks emails from the past 7 days on subsequent runs within the same day
- Extracts attachments from emails
- Filters attachments based on IBAN criteria
- Uses a context manager to ensure emails are handled properly even when errors occur

### Redis Integration
- Publishes messages to a Redis pub/sub channel ("reader")
- Each message contains:
  - Unique UUID
  - Timestamp
  - Initial stage marker ("red")
  - Attachment payload and metadata
- Connects to an external Redis server

### Error Handling
- Robust error management with context managers
- Automatic marking of emails as unread if processing fails
- Comprehensive logging

## Configuration

### Environment Variables
```
EMAIL_HOST=10.10.2.34
EMAIL_USERNAME=isbank@mehmetkaratay.com.tr
EMAIL_PASSWORD=system
EMAIL_SLEEP=60
AUTHORIZE_IBAN=4245-0093333
REDIS_HOST=10.10.2.15
REDIS_PORT=6379
REDIS_PASSWORD=your_strong_password_here
```

## Deployment

### Docker
The service is containerized using Docker and can be deployed using the provided Dockerfile and docker-compose configuration.

```bash
# Build and start the service
docker compose -f bank-services-docker-compose.yml up -d --build

# View logs
docker compose -f bank-services-docker-compose.yml logs -f email_service

# Stop the service
docker compose -f bank-services-docker-compose.yml down
```

### Service Management
The `check_bank_services.sh` script provides a simple way to restart the service:

```bash
./check_bank_services.sh
```

## Architecture

### Redis Pub/Sub Chain
This service is the first in a multi-stage processing chain:
1. **Email Service** (this service): Reads emails, extracts attachments, publishes to Redis with stage="red"
2. **Parser Service**: Subscribes to stage="red" messages, parses the Excel data, republishes with stage="parsed"
3. **Writer Service**: Subscribes to stage="parsed" messages, writes the data to the database, republishes with stage="written"

## Development

### Dependencies
- Python 3.12
- Redbox (email library)
- Redis

### State Management
The service maintains a state file at `/tmp/email_service_last_run.json` to track when it last ran, enabling the smart date-based filtering feature.
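For illustration only, a minimal sketch of the message shape this service publishes to the "reader" channel, based on the fields listed above and in `app.py`; the example filename and header values are hypothetical:

```python
# Hypothetical sketch of the published message shape (not part of the repository).
import base64
import uuid
from datetime import datetime, timezone

message = {
    "filename": "statement_42450093333.xls",        # example attachment name
    "payload": base64.b64encode(b"...").decode(),    # attachment bytes, base64-encoded
    "is_base64": True,
    "stage": "red",                                  # initial stage in the processing chain
    "created_at": datetime.now(timezone.utc).isoformat(),
    "uuid": str(uuid.uuid4()),
    "from": "sender@example.com",                    # mail headers copied into the message
    "to": "mailbox@example.com",
    "subject": "Account statement",
    "date": "Mon, 01 Jan 2024 09:00:00 +0300",
}
```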
@ -0,0 +1,221 @@
import time
import arrow
import os
import json
import base64
from uuid import uuid4
from datetime import datetime, timedelta
from typing import TypeVar
from BankServices.ServiceDepends.config import Config

from redbox import EmailBox
from redbox.query import FROM, UNSEEN, OR, SINCE

# Import Redis pub/sub handler
from Controllers.Redis.Broadcast.actions import redis_pubsub


authorized_iban = Config.AUTHORIZE_IBAN
authorized_iban_cleaned = authorized_iban.replace("-", "")
# Define Redis channel name
REDIS_CHANNEL = "reader"
delimiter = "|"

# banks_mails = mailbox.search(from_=filter_mail, unseen=True)  # do not use this form
# banks_mails = mailbox.search(FROM(filter_mail) & UNSEEN)


T = TypeVar("T")


class EmailProcessingContext:
    """Context manager for email processing that marks emails as unread if an error occurs."""

    def __init__(self, email_message, mark_as_read: bool = True):
        self.email_message = email_message
        self.mark_as_read = mark_as_read
        self.success = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        if exc_type is not None or not self.success:
            # If an exception occurred or processing wasn't successful, mark as unread
            try:
                if hasattr(self.email_message, 'mark_as_unread'):
                    self.email_message.mark_as_unread()
                    print(f"[EMAIL_SERVICE] Marked email as UNREAD due to processing error: {exc_val if exc_val else 'Unknown error'}")
            except Exception as e:
                print(f"[EMAIL_SERVICE] Failed to mark email as unread: {str(e)}")
        elif self.mark_as_read:
            # If processing was successful and mark_as_read is True, ensure it's marked as read
            try:
                if hasattr(self.email_message, 'mark_as_read'):
                    self.email_message.mark_as_read()
            except Exception as e:
                print(f"[EMAIL_SERVICE] Failed to mark email as read: {str(e)}")
        return False  # Don't suppress exceptions


def publish_payload_to_redis(
    payload, filename: str, mail_info: dict
) -> bool:
    # Create message document
    # Use base64 encoding for binary payloads to ensure proper transmission
    if isinstance(payload, bytes):
        encoded_payload = base64.b64encode(payload).decode('utf-8')
        is_base64 = True
    else:
        encoded_payload = payload
        is_base64 = False

    message = {
        "filename": filename,
        "payload": encoded_payload,
        "is_base64": is_base64,  # Flag to indicate if payload is base64 encoded
        "stage": "red",  # Initial stage for the processing chain
        "created_at": str(arrow.now()),
        "uuid": str(uuid4()),  # Use UUID
        **mail_info,
    }

    # Publish to Redis channel
    result = redis_pubsub.publisher.publish(REDIS_CHANNEL, message)

    if result.status:
        print(f"[EMAIL_SERVICE] Published message with filename: {filename} to channel: {REDIS_CHANNEL}")
        return True
    else:
        print(f"[EMAIL_SERVICE] Publish error: {result.error}")
        return False


def read_email_and_publish_to_redis(email_message, mail_info: dict) -> bool:
    if email_message.is_multipart():  # Check if email has multipart content
        for part in email_message.walk():  # Each part can be an attachment
            content_disposition = part.get("Content-Disposition")
            if content_disposition and "attachment" in content_disposition:
                if filename := part.get_filename():
                    is_iban_in_filename = authorized_iban_cleaned in str(filename)
                    if is_iban_in_filename:
                        if payload := part.get_payload(decode=True):
                            return publish_payload_to_redis(
                                payload=payload,
                                filename=filename,
                                mail_info=mail_info,
                            )
    else:  # Handle non-multipart email, though this is rare for emails with attachments
        content_disposition = email_message.get("Content-Disposition")
        if content_disposition and "attachment" in content_disposition:
            if filename := email_message.get_filename():
                is_iban_in_filename = authorized_iban_cleaned in str(filename)
                if is_iban_in_filename:
                    payload = email_message.get_payload(decode=True)
                    return publish_payload_to_redis(
                        payload=payload,
                        filename=filename,
                        mail_info=mail_info,
                    )
    return False


def app():
    # Get email configuration
    host = Config.EMAIL_HOST
    port = Config.EMAIL_PORT
    username = Config.EMAIL_USERNAME
    password = Config.EMAIL_PASSWORD

    box = EmailBox(host=host, port=port, username=username, password=password)
    if not box:
        raise Exception("Mailbox not found")

    box.connect()
    mail_folders = box.mailfolders
    filter_mail = OR(FROM(Config.MAILBOX), FROM(Config.MAIN_MAIL))
    filter_print = f"{Config.MAILBOX} & {Config.MAIN_MAIL}"

    # Determine if this is the first run of the day
    # Store last run date in a file
    last_run_file = "/tmp/email_service_last_run.json"
    current_date = datetime.now().strftime("%Y-%m-%d")
    days_to_check, full_check = 7, 90  # Default to 7 days

    try:
        if os.path.exists(last_run_file):
            with open(last_run_file, 'r') as f:
                last_run_data = json.load(f)
                last_run_date = last_run_data.get('last_run_date')

                # If this is the first run of a new day, check 90 days
                if last_run_date != current_date:
                    days_to_check = full_check
                    print(f"[EMAIL_SERVICE] First run of the day. Checking emails from the past {days_to_check} days")
                else:
                    print(f"[EMAIL_SERVICE] Subsequent run today. Checking emails from the past {days_to_check} days")
        else:
            # If no last run file exists, this is the first run ever - check 90 days
            days_to_check = full_check
            print(f"[EMAIL_SERVICE] First run detected. Checking emails from the past {days_to_check} days")
    except Exception as e:
        print(f"[EMAIL_SERVICE] Error reading last run file: {str(e)}. Using default of {days_to_check} days")

    # Update the last run file
    try:
        with open(last_run_file, 'w') as f:
            json.dump({'last_run_date': current_date}, f)
    except Exception as e:
        print(f"[EMAIL_SERVICE] Error writing last run file: {str(e)}")

    # Calculate the date to check from
    check_since_date = (datetime.now() - timedelta(days=days_to_check)).strftime("%d-%b-%Y")

    for folder in mail_folders:
        if folder.name == "INBOX":
            # Search for emails since the calculated date
            banks_mails = folder.search(filter_mail & SINCE(check_since_date))
            print(
                f"[EMAIL_SERVICE] Reading mailbox [{username}] with mail sender [{filter_print}] since {check_since_date} with count: {len(banks_mails)}"
            )

            for banks_mail in banks_mails or []:
                if email_message := banks_mail.email:
                    # Use context manager to handle errors and mark email as unread if needed
                    with EmailProcessingContext(banks_mail) as ctx:
                        try:
                            headers = {k.lower(): v for k, v in banks_mail.headers.items()}
                            mail_info = {
                                "from": headers["from"],
                                "to": headers["to"],
                                "subject": headers["subject"],
                                "date": str(headers["date"]),
                            }

                            # Process the email and publish to Redis
                            success = read_email_and_publish_to_redis(
                                email_message=email_message, mail_info=mail_info
                            )

                            # Set success flag for the context manager
                            ctx.success = success

                            if success:
                                print(f"[EMAIL_SERVICE] Successfully processed email with subject: {mail_info['subject']}")
                            else:
                                print(f"[EMAIL_SERVICE] No matching attachments found in email with subject: {mail_info['subject']}")

                        except Exception as e:
                            print(f"[EMAIL_SERVICE] Error processing email: {str(e)}")
                            # The context manager will mark the email as unread


if __name__ == "__main__":
    print("=== Starting Email Service with Redis Pub/Sub ===")
    print(f"Publishing to channel: {REDIS_CHANNEL}")
    time.sleep(20)  # Wait 20 seconds for other services to start

    while True:
        print("\n[EMAIL_SERVICE] Checking for new emails...")
        app()
        time.sleep(Config.EMAIL_SLEEP)
@ -0,0 +1,12 @@
[project]
name = "emailservice"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "arrow>=1.3.0",
    "redbox>=0.2.1",
    "redis>=5.2.1",
    "pydantic-settings>=2.8.1",
]
@ -0,0 +1,29 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc \
    && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /BankServices/ParserService/pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root --only main \
    && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /BankServices/ParserService /BankServices/ParserService
COPY /BankServices/ParserService /
COPY /BankServices/ServiceDepends /BankServices/ServiceDepends

COPY /Controllers /Controllers
COPY /BankServices/ServiceDepends/config.py /BankServices/ServiceDepends/config.py

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application
CMD ["python", "/BankServices/ParserService/app.py"]
@ -0,0 +1,76 @@
# Parser Service

## Overview
The Parser Service is the second component in the Redis pub/sub processing chain for bank-related email automation. It subscribes to messages with stage="red" from the Email Service, parses Excel attachments, and publishes the processed data back to Redis with stage="parsed".

## Features

### Redis Integration
- Subscribes to the "reader" Redis channel for messages with stage="red"
- Processes Excel attachments contained in the messages
- Publishes parsed data back to Redis with stage="parsed" or "not found"
- Maintains message metadata and adds processing timestamps

### Excel Processing
- Parses bank statement Excel files
- Extracts transaction data including:
  - IBAN numbers
  - Transaction dates and times
  - Currency values and balances
  - Transaction types and references
  - Branch information

### Error Handling
- Robust error management for Excel parsing
- Detailed logging of processing steps and errors
- Graceful handling of malformed messages

## Configuration

### Environment Variables
The service uses the same Redis configuration as the Email Service:
```
REDIS_HOST=10.10.2.15
REDIS_PORT=6379
REDIS_PASSWORD=your_strong_password_here
```

## Deployment

### Docker
The service is containerized using Docker and can be deployed using the provided Dockerfile and docker-compose configuration.

```bash
# Build and start the service
docker compose -f bank-services-docker-compose.yml up -d --build

# View logs
docker compose -f bank-services-docker-compose.yml logs -f parser_service

# Stop the service
docker compose -f bank-services-docker-compose.yml down
```

### Service Management
The `check_bank_services.sh` script provides a simple way to restart the service:

```bash
./check_bank_services.sh
```

## Architecture

### Redis Pub/Sub Chain
This service is the second in a multi-stage processing chain:
1. **Email Service**: Reads emails, extracts attachments, publishes to Redis with stage="red"
2. **Parser Service** (this service): Subscribes to stage="red" messages, parses Excel data, republishes with stage="parsed"
3. **Writer Service**: Subscribes to stage="parsed" messages, writes data to the database, republishes with stage="written"

## Development

### Dependencies
- Python 3.12
- Pandas and xlrd for Excel processing
- Redis for pub/sub messaging
- Arrow for date handling
- Unidecode for text normalization
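As a rough sketch of the decode step described above (a hedged illustration, assuming the message fields produced by the Email Service; the `pandas.read_excel` engine depends on the file extension):

```python
# Hypothetical subscriber-side decode sketch (not part of the repository).
import base64
import io

import pandas as pd


def payload_to_dataframe(message: dict) -> pd.DataFrame:
    """Turn a 'reader'-channel message back into a DataFrame."""
    payload = message["payload"]
    if message.get("is_base64"):
        payload = base64.b64decode(payload)      # bytes of the original attachment
    elif isinstance(payload, str):
        payload = payload.encode("utf-8")
    engine = "xlrd" if message["filename"].lower().endswith(".xls") else "openpyxl"
    return pd.read_excel(io.BytesIO(payload), engine=engine)
```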
@ -0,0 +1,227 @@
import time
import arrow
import io
import json
import base64
import datetime

from pandas import DataFrame, read_excel
from unidecode import unidecode
from BankServices.ServiceDepends.config import Config

# Import Redis pub/sub handler
from Controllers.Redis.Broadcast.actions import redis_pubsub

# Define Redis channels
REDIS_CHANNEL_IN = "reader"  # Subscribe to Email Service channel
REDIS_CHANNEL_OUT = "parser"  # Publish to Parser Service channel
delimiter = "|"


def publish_parsed_data_to_redis(data, collected_data_dict: list[dict], filename: str) -> bool:
    """Publish parsed data to Redis.

    Args:
        data: Original message data from Redis
        collected_data_dict: Parsed data from Excel file
        filename: Name of the processed file

    Returns:
        bool: Success status
    """
    # Create a copy of the original message to preserve metadata
    message = data.copy() if isinstance(data, dict) else {}
    message.pop("payload", None)
    message.pop("is_base64", None)
    # Update with parsed data and new stage
    if collected_data_dict:
        message["parsed"] = collected_data_dict
        message["stage"] = "parsed"  # Update stage to 'parsed'
    else:
        message["parsed"] = None
        message["stage"] = "not found"  # Mark as 'not found' if parsing failed

    # Add processing timestamp
    message["parsed_at"] = str(arrow.now())
    message["filename"] = filename

    # Publish to Redis channel
    result = redis_pubsub.publisher.publish(REDIS_CHANNEL_OUT, message)

    if result.status:
        print(f"[PARSER_SERVICE] Published parsed data for {filename} with stage: {message['stage']}")
        return True
    else:
        print(f"[PARSER_SERVICE] Publish error: {result.error}")
        return False


def parse_excel_file(excel_frame: DataFrame) -> list[dict]:
    """Parse Excel file data.

    Args:
        excel_frame: DataFrame containing Excel data

    Returns:
        list[dict]: List of parsed data dictionaries
    """
    iban, data_list = "", []
    try:
        for row in excel_frame.itertuples():
            if "IBAN" in str(row[3]).upper():
                iban = str(row[5]).replace(" ", "")
            if not str(row[1]) == "nan" and not str(row[2]) == "nan":
                if len(str(row[1]).split("/")) > 2:
                    data_list.append(
                        dict(
                            iban=str(iban),
                            bank_date=arrow.get(
                                datetime.datetime.strptime(str(row[1]), "%d/%m/%Y-%H:%M:%S")
                            ).__str__(),
                            channel_branch=unidecode(str(row[3])),
                            currency_value=(
                                float(str(row[4]).replace(",", "")) if row[4] else 0
                            ),
                            balance=float(str(row[5]).replace(",", "")) if row[5] else 0,
                            additional_balance=(
                                float(str(row[6]).replace(",", "")) if row[6] else 0
                            ),
                            process_name=str(row[7]),
                            process_type=unidecode(str(row[8])),
                            process_comment=unidecode(str(row[9])),
                            bank_reference_code=str(row[15]),
                        )
                    )
        print(f"[PARSER_SERVICE] Successfully parsed {len(data_list)} records from Excel file")
    except Exception as e:
        print(f"[PARSER_SERVICE] Error parsing Excel file: {str(e)}")
    return data_list


def process_message(message):
    """Process a message from Redis.

    Args:
        message: Message data from Redis subscriber
    """
    # Extract the message data
    data = message["data"]

    # If data is a string, parse it as JSON
    if isinstance(data, str):
        try:
            data = json.loads(data)
        except json.JSONDecodeError as e:
            print(f"[PARSER_SERVICE] Error parsing message data: {e}")
            return

    # Check if stage is 'red' before processing
    if data.get("stage") == "red":
        try:
            filename = data.get("filename")
            payload = data.get("payload")
            is_base64 = data.get("is_base64", False)
            print(f"[PARSER_SERVICE] Processing file: {filename}")

            # Handle base64-encoded payload
            if is_base64 and isinstance(payload, str):
                try:
                    # Decode base64 string to bytes
                    payload = base64.b64decode(payload)
                    print(f"[PARSER_SERVICE] Successfully decoded base64 payload, size: {len(payload)} bytes")
                except Exception as e:
                    print(f"[PARSER_SERVICE] Error decoding base64 payload: {str(e)}")
            # Convert regular string payload to bytes if needed
            elif isinstance(payload, str):
                payload = payload.encode('utf-8')

            # Create an in-memory file-like object and try multiple approaches
            excel_frame = None
            errors = []

            # Save payload to a temporary file for debugging if needed
            temp_file_path = f"/tmp/{filename}"
            try:
                with open(temp_file_path, 'wb') as f:
                    f.write(payload)
                print(f"[PARSER_SERVICE] Saved payload to {temp_file_path} for debugging")
            except Exception as e:
                print(f"[PARSER_SERVICE] Could not save debug file: {str(e)}")

            # Try different approaches to read the Excel file
            approaches = [
                # Approach 1: Try xlrd for .xls files
                lambda: DataFrame(read_excel(io.BytesIO(payload), engine='xlrd')) if filename.lower().endswith('.xls') else None,
                # Approach 2: Try openpyxl for .xlsx files
                lambda: DataFrame(read_excel(io.BytesIO(payload), engine='openpyxl')) if filename.lower().endswith('.xlsx') else None,
                # Approach 3: Try xlrd with explicit sheet name
                lambda: DataFrame(read_excel(io.BytesIO(payload), engine='xlrd', sheet_name=0)),
                # Approach 4: Try with temporary file
                lambda: DataFrame(read_excel(temp_file_path)),
            ]

            # Try each approach until one works
            for i, approach in enumerate(approaches):
                try:
                    result = approach()
                    if result is not None:
                        excel_frame = result
                        print(f"[PARSER_SERVICE] Successfully read Excel file using approach {i+1}")
                        break
                except Exception as e:
                    errors.append(f"Approach {i+1}: {str(e)}")

            # If all approaches failed, raise an exception
            if excel_frame is None:
                error_details = "\n".join(errors)
                raise Exception(f"Failed to read Excel file using all approaches:\n{error_details}")

            # Extract data from the Excel file
            collected_data_dict = parse_excel_file(excel_frame)

            # Publish parsed data to Redis
            publish_parsed_data_to_redis(
                data=data,
                collected_data_dict=collected_data_dict,
                filename=filename
            )
        except Exception as e:
            print(f"[PARSER_SERVICE] Error processing message: {str(e)}")
    else:
        print(f"[PARSER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'red')")


def app():
    """Main application function."""
    print("[PARSER_SERVICE] Starting Parser Service")

    # Subscribe to the input channel
    result = redis_pubsub.subscriber.subscribe(REDIS_CHANNEL_IN, process_message)

    if result.status:
        print(f"[PARSER_SERVICE] Subscribed to channel: {REDIS_CHANNEL_IN}")
    else:
        print(f"[PARSER_SERVICE] Subscribe error: {result.error}")
        return

    # Start listening for messages
    listen_result = redis_pubsub.subscriber.start_listening(in_thread=True)

    if listen_result.status:
        print("[PARSER_SERVICE] Listening for messages")
    else:
        print(f"[PARSER_SERVICE] Error starting listener: {listen_result.error}")
        return


if __name__ == "__main__":
    # Initialize the app once
    app()

    # Keep the main thread alive
    try:
        while True:
            time.sleep(Config.EMAIL_SLEEP)
    except KeyboardInterrupt:
        print("\n[PARSER_SERVICE] Stopping service...")
        redis_pubsub.subscriber.stop_listening()
@ -0,0 +1,15 @@
[project]
name = "parserservice"
version = "0.1.0"
description = "Parser Service for bank email attachments using Redis pub/sub"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "arrow>=1.3.0",
    "pandas>=2.2.3",
    "redis>=5.0.1",
    "unidecode>=1.3.8",
    "xlrd>=2.0.1",
    "openpyxl>=3.1.2",
    "pydantic-settings>=2.8.1",
]
@ -0,0 +1,34 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /BankServices/RoutineEmailService/pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Install cron for scheduling tasks
RUN apt-get update && apt-get install -y cron

# Schedule the daily run for 11:00 Istanbul time (UTC+3), which is 08:00 UTC system time
RUN echo "0 8 * * * /usr/local/bin/python /app.py >> /var/log/cron.log 2>&1" > /tmp/crontab_list && crontab /tmp/crontab_list

# Copy application code
COPY /BankServices/RoutineEmailService /

COPY /Schemas /Schemas
COPY /Controllers /Controllers
COPY /BankServices/ServiceDepends /

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Create log file to grab cron logs
RUN touch /var/log/cron.log

# Start cron and tail the log file so logs can be monitored
CMD cron && tail -f /var/log/cron.log
@ -0,0 +1,144 @@
import arrow
from typing import List, Any

from Schemas import AccountRecords
from jinja2 import Environment, FileSystemLoader
from Controllers.Email.send_email import EmailSendModel, EmailService


def render_email_template(
    headers: List[str], rows: List[List[Any]], balance_error: bool, bank_balance: str
) -> str:
    """
    Render the HTML email template with the provided data.

    Args:
        headers: List of column headers for the table
        rows: List of data rows for the table
        balance_error: Flag indicating if there's a balance discrepancy
        bank_balance: Current bank balance formatted as string

    Returns:
        Rendered HTML template as string
    """
    try:
        # Look for template in ServiceDepends directory
        env = Environment(loader=FileSystemLoader("/"))
        template = env.get_template("template_accounts.html")

        # Render template with variables
        return template.render(
            headers=headers,
            rows=rows,
            bank_balance=bank_balance,
            balance_error=balance_error,
            today=str(arrow.now().date()),
        )
    except Exception as e:
        print(f'Exception while rendering template: {e}')
        raise


def send_email_to_given_address(send_to: str, html_template: str) -> bool:
    """
    Send email with the rendered HTML template to the specified address.

    Args:
        send_to: Email address of the recipient
        html_template: Rendered HTML template content

    Returns:
        Boolean indicating if the email was sent successfully
    """
    today = arrow.now()
    subject = f"{str(today.date())} Gunes Apt. Cari Durum Bilgilendirme Raporu"

    # Create email parameters using EmailSendModel
    email_params = EmailSendModel(
        subject=subject,
        html=html_template,
        receivers=[send_to],
        text=f"Gunes Apt. Cari Durum Bilgilendirme Raporu - {today.date()}"
    )

    try:
        # Use the context manager to handle connection errors
        with EmailService.new_session() as email_session:
            # Send email through the service
            EmailService.send_email(email_session, email_params)
            return True
    except Exception as e:
        print(f'Exception while sending email: {e}')
        return False


def set_account_records_to_send_email() -> bool:
    """
    Retrieve account records from the database, format them, and send an email report.

    Usage:
        from app import set_account_records_to_send_email

    Returns:
        Boolean indicating if the process completed successfully
    """
    # Get database session and retrieve records
    with AccountRecords.new_session() as db_session:
        account_records_query = AccountRecords.filter_all(db=db_session).query

        # Get the 3 most recent records
        account_records: List[AccountRecords] = (
            account_records_query.order_by(
                AccountRecords.bank_date.desc(),
                AccountRecords.bank_reference_code.desc()
            )
            .limit(3)
            .all()
        )

        # Check if we have enough records
        if len(account_records) < 2:
            return False

        # Check for balance discrepancy
        first_record, second_record = account_records[0], account_records[1]
        expected_second_balance = first_record.bank_balance - first_record.currency_value
        balance_error = expected_second_balance != second_record.bank_balance

        if balance_error:
            return False

        # Format rows for the email template
        list_of_rows = []
        for record in account_records:
            list_of_rows.append([
                record.bank_date.strftime("%d/%m/%Y %H:%M"),
                record.process_comment,
                f"{record.currency_value:,.2f}",
                f"{record.bank_balance:,.2f}"
            ])
        # Get the most recent bank balance
        last_bank_balance = sorted(account_records, key=lambda x: x.bank_date, reverse=True)[0].bank_balance
        # Define headers for the table
        headers = ["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri", "Banka Bakiyesi"]

        # Recipient email address
        send_to = "karatay@mehmetkaratay.com.tr"

        # Render email template
        html_template = render_email_template(
            headers=headers,
            rows=list_of_rows,
            balance_error=balance_error,
            bank_balance=f"{last_bank_balance:,.2f}",
        )

        # Send the email
        return send_email_to_given_address(send_to=send_to, html_template=html_template)


if __name__ == "__main__":
    success = set_account_records_to_send_email()
    exit_code = 0 if success else 1
    exit(exit_code)
@ -0,0 +1,17 @@
[project]
name = "routineemailservice"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "arrow>=1.3.0",
    "redbox>=0.2.1",
    "redis>=5.2.1",
    "pydantic-settings>=2.8.1",
    "sqlalchemy-mixins>=2.0.5",
    "fastapi>=0.115.11",
    "jinja2>=3.1.6",
    "psycopg2-binary>=2.9.10",
    "redmail>=0.6.0",
]
@ -0,0 +1,25 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /BankServices/SenderService/pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false && poetry install --no-interaction --no-ansi --no-root --only main && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /BankServices/SenderService /

COPY /Schemas /Schemas
COPY /Controllers /Controllers
COPY /BankServices/ServiceDepends /

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application via Poetry
CMD ["poetry", "run", "python", "app.py"]
@ -0,0 +1,172 @@
import time
import arrow
from typing import List, Any

from Schemas import AccountRecords
from jinja2 import Environment, FileSystemLoader
from Controllers.Email.send_email import EmailSendModel, EmailService


def render_email_template(
    headers: List[str], rows: List[List[Any]], balance_error: bool, bank_balance: str
) -> str:
    """
    Render the HTML email template with the provided data.

    Args:
        headers: List of column headers for the table
        rows: List of data rows for the table
        balance_error: Flag indicating if there's a balance discrepancy
        bank_balance: Current bank balance formatted as string

    Returns:
        Rendered HTML template as string
    """
    try:
        # Look for template in ServiceDepends directory
        env = Environment(loader=FileSystemLoader("/"))
        template = env.get_template("template_accounts.html")

        return template.render(
            headers=headers,
            rows=rows,
            bank_balance=bank_balance,
            balance_error=balance_error,
            today=str(arrow.now().date()),
        )
    except Exception as e:
        print(f'Template rendering failed: {e}')
        raise


def send_email_to_given_address(send_to: str, html_template: str, count_of_records: int) -> bool:
    """
    Send email with the rendered HTML template to the specified address.

    Args:
        send_to: Email address of the recipient
        html_template: Rendered HTML template content
        count_of_records: Number of records included in the report subject

    Returns:
        Boolean indicating if the email was sent successfully
    """
    today = arrow.now()
    subject = f"{str(today.date())} Gunes Apt. Cari Durum Kayıt Giriş Raporu"

    # Create email parameters using EmailSendModel
    email_params = EmailSendModel(
        subject=subject + f" ({count_of_records} kayıt)",
        html=html_template,
        receivers=[send_to],
        text=f"Gunes Apt. Cari Durum Kayıt Giriş Raporu - {today.date()}"
    )

    try:
        # Use the context manager to handle connection errors
        with EmailService.new_session() as email_session:
            # Send email through the service
            EmailService.send_email(email_session, email_params)
            print(f"Email successfully sent to: {send_to}")
            return True
    except Exception as e:
        print(f'Failed to send email: {e}')
        return False


def process_unsent_email_records() -> bool:
    """
    Process account records that haven't been emailed yet.

    Finds records with is_email_send=False, formats them into an email,
    sends the email, and updates the records as sent if successful.

    Returns:
        bool: True if email was sent successfully, False otherwise
    """
    try:
        # Use the context manager to handle database connections
        with AccountRecords.new_session() as db_session:
            # Query un-sent mail rows - with limit for display only
            account_records_query = AccountRecords.filter_all(
                AccountRecords.is_email_send == False,
                db=db_session,
            ).query.order_by(AccountRecords.bank_date.asc()).limit(20)

            account_records: List[AccountRecords] = account_records_query.all()
            if not account_records:
                print("No unsent email records found")
                return False

            # Get the IDs of the records we're processing
            record_ids = [record.id for record in account_records]

            print(f"Found {len(account_records)} unsent email records")

            # Format rows for the email template
            list_of_rows = []
            for record in account_records:
                list_of_rows.append([
                    record.bank_date.strftime("%d/%m/%Y %H:%M"),
                    record.process_comment,
                    f"{record.currency_value:,.2f}",
                    f"{record.bank_balance:,.2f}"
                ])

            # Reverse list by date
            list_of_rows = list_of_rows[::-1]

            # Get the most recent bank balance
            last_bank_balance = sorted(account_records, key=lambda x: x.bank_date, reverse=True)[0].bank_balance

            # Define headers for the table
            headers = ["Ulaştığı Tarih", "Banka Transaksiyonu Ek Bilgi", "Aktarım Değeri", "Banka Bakiyesi"]

            # Recipient email address
            send_to = "karatay@mehmetkaratay.com.tr"

            # Render email template
            html_template = render_email_template(
                headers=headers,
                rows=list_of_rows,
                balance_error=False,
                bank_balance=f"{last_bank_balance:,.2f}",
            )

            # Send the email
            if send_email_to_given_address(send_to=send_to, html_template=html_template, count_of_records=len(list_of_rows)):
                # Create a new query without limit for updating
                update_query = AccountRecords.filter_all(
                    AccountRecords.id.in_(record_ids),
                    db=db_session
                ).query

                # Update records as sent
                update_query.update({"is_email_send": True}, synchronize_session=False)
                AccountRecords.save(db_session)
                print(f"Successfully marked {len(account_records)} records as sent")
                return True

            print("Email sending failed, records not updated")
            return False

    except Exception as e:
        print(f'Error processing unsent email records: {e}')
        return False


if __name__ == "__main__":
    print("Starting Email Sender Service")

    while True:
        try:
            result = process_unsent_email_records()
            if result:
                print("Email processing completed successfully")
            else:
                print("No emails sent in this iteration")
        except Exception as e:
            print(f"Unexpected error in main loop: {e}")

        # Sleep for 60 seconds before next check
        print("Sleeping for 60 seconds")
        time.sleep(60)
@ -0,0 +1,17 @@
[project]
name = "routineemailservice"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "arrow>=1.3.0",
    "redbox>=0.2.1",
    "redis>=5.2.1",
    "pydantic-settings>=2.8.1",
    "sqlalchemy-mixins>=2.0.5",
    "fastapi>=0.115.11",
    "jinja2>=3.1.6",
    "psycopg2-binary>=2.9.10",
    "redmail>=0.6.0",
]
@ -0,0 +1,38 @@
import os


class Config:

    MAILBOX: str = os.getenv("MAILBOX", "bilgilendirme@ileti.isbank.com.tr")
    MAIN_MAIL: str = os.getenv("MAIN_MAIL", "karatay.berkay@gmail.com")
    INFO_MAIL: str = os.getenv("INFO_MAIL", "mehmet.karatay@hotmail.com")
    EMAIL_HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
    EMAIL_SENDER_USERNAME: str = os.getenv(
        "EMAIL_SENDER_USERNAME", "karatay@mehmetkaratay.com.tr"
    )
    EMAIL_USERNAME: str = os.getenv("EMAIL_USERNAME", "isbank@mehmetkaratay.com.tr")
    EMAIL_PASSWORD: str = os.getenv("EMAIL_PASSWORD", "system")
    AUTHORIZE_IBAN: str = os.getenv("AUTHORIZE_IBAN", "4245-0093333")
    SERVICE_TIMING: int = int(os.getenv("SERVICE_TIMING", 900))
    EMAIL_PORT: int = int(os.getenv("EMAIL_PORT", 993))
    EMAIL_SEND_PORT: int = int(os.getenv("EMAIL_SEND_PORT", 587))
    EMAIL_SLEEP: int = int(os.getenv("EMAIL_SLEEP", 60))
    # Parse the flag explicitly: bool() on a non-empty string such as "false" would be True
    EMAIL_SEND: bool = os.getenv("EMAIL_SEND", "false").lower() in ("1", "true", "yes")


class EmailConfig:

    EMAIL_HOST: str = os.getenv("EMAIL_HOST", "10.10.2.34")
    EMAIL_USERNAME: str = Config.EMAIL_SENDER_USERNAME
    EMAIL_PASSWORD: str = Config.EMAIL_PASSWORD
    EMAIL_PORT: int = Config.EMAIL_SEND_PORT
    EMAIL_SEND: bool = Config.EMAIL_SEND

    @classmethod
    def as_dict(cls):
        return dict(
            host=cls.EMAIL_HOST,
            port=cls.EMAIL_PORT,
            username=cls.EMAIL_USERNAME,
            password=cls.EMAIL_PASSWORD,
        )
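For context, a hedged usage sketch of how these settings are typically consumed: redmail's `EmailSender` accepts host/port/username/password keyword arguments, and `EMAIL_SEND` gates whether delivery is attempted. The recipient address is a placeholder.

```python
# Hypothetical usage sketch; assumes the container layout where config.py is importable.
from redmail import EmailSender

from BankServices.ServiceDepends.config import EmailConfig

sender = EmailSender(**EmailConfig.as_dict())  # host, port, username, password
if EmailConfig.EMAIL_SEND:  # only attempt delivery when explicitly enabled
    sender.send(
        subject="Connectivity check",
        receivers=["ops@example.com"],  # placeholder recipient
        text="SMTP settings loaded from EmailConfig.",
    )
```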
@ -0,0 +1,54 @@
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Gelen Banka Kayıtları</title>
    <style>
        body {
            font-family: Arial, sans-serif;
            margin: 20px;
        }
        table {
            width: 100%;
            border-collapse: collapse;
        }
        table, th, td {
            border: 1px solid black;
        }
        th, td {
            padding: 10px;
            text-align: left;
        }
        th {
            background-color: #f2f2f2;
        }
    </style>
</head>
<body>
    <h1>Günaydın, Admin</h1>
    <br>
    <p>Banka Kayıtları : {{today}} </p>
    <p><b>Son Bakiye : {{bank_balance}} </b></p>
    <p><b>{{"Status : İkinci Bakiye Hatalı" if balance_error else "Status :OK"}}</b></p>
    <table border="1">
        <thead>
            <tr>
                {% for header in headers %}
                <th>{{ header }}</th>
                {% endfor %}
            </tr>
        </thead>
        <tbody>
            {% for row in rows %}
            <tr>
                {% for cell in row %}
                <td>{{ cell }}</td>
                {% endfor %}
            </tr>
            {% endfor %}
        </tbody>
    </table>
    <p>Teşekkür ederiz,<br>Evyos Yönetim<br>Saygılarımızla</p>
</body>
</html>
@ -0,0 +1,28 @@
FROM python:3.12-slim

WORKDIR /

# Install system dependencies and Poetry
RUN apt-get update && apt-get install -y --no-install-recommends gcc \
    && rm -rf /var/lib/apt/lists/* && pip install --no-cache-dir poetry

# Copy Poetry configuration
COPY /BankServices/WriterService/pyproject.toml ./pyproject.toml

# Configure Poetry and install dependencies with optimizations
RUN poetry config virtualenvs.create false \
    && poetry install --no-interaction --no-ansi --no-root --only main \
    && pip cache purge && rm -rf ~/.cache/pypoetry

# Copy application code
COPY /BankServices/WriterService /BankServices/WriterService
COPY /BankServices/WriterService /
COPY /Controllers /Controllers
COPY /BankServices/ServiceDepends/config.py /BankServices/ServiceDepends/config.py
COPY /Schemas /Schemas

# Set Python path to include app directory
ENV PYTHONPATH=/ PYTHONUNBUFFERED=1 PYTHONDONTWRITEBYTECODE=1

# Run the application
CMD ["python", "/BankServices/WriterService/app.py"]
@ -0,0 +1,73 @@
# Writer Service

## Overview
The Writer Service is the third and final component in the Redis pub/sub processing chain for bank-related email automation. It subscribes to messages with stage="parsed" from the Parser Service, writes the processed data to the database, and publishes a completion status with stage="written".

## Features

### Redis Integration
- Subscribes to the "parser" Redis channel for messages with stage="parsed"
- Processes parsed data and writes it to the database
- Publishes completion status to the "writer" channel with stage="written"
- Maintains message metadata and adds processing timestamps

### Database Integration
- Writes parsed transaction data to the AccountRecords database
- Links transactions to build information via IBAN
- Handles duplicate detection to prevent redundant entries
- Adds date components for easier querying (year, month, day, weekday)

### Error Handling
- Robust error management for database operations
- Detailed logging of processing steps and errors
- Graceful handling of malformed messages

## Configuration

### Environment Variables
The service uses the same Redis configuration as the other services:
```
REDIS_HOST=10.10.2.15
REDIS_PORT=6379
REDIS_PASSWORD=your_strong_password_here
```

## Deployment

### Docker
The service is containerized using Docker and can be deployed using the provided Dockerfile and docker-compose configuration.

```bash
# Build and start the service
docker compose -f bank-services-docker-compose.yml up -d --build

# View logs
docker compose -f bank-services-docker-compose.yml logs -f writer_service

# Stop the service
docker compose -f bank-services-docker-compose.yml down
```

### Service Management
The `check_bank_services.sh` script provides a simple way to restart the service:

```bash
./check_bank_services.sh
```

## Architecture

### Redis Pub/Sub Chain
This service is the third and final component in a multi-stage processing chain:
1. **Email Service**: Reads emails, extracts attachments, publishes to the "reader" channel with stage="red"
2. **Parser Service**: Subscribes to the "reader" channel, parses Excel data, publishes to the "parser" channel with stage="parsed"
3. **Writer Service** (this service): Subscribes to the "parser" channel, writes data to the database, publishes to the "writer" channel with stage="written"

## Development

### Dependencies
- Python 3.12
- SQLAlchemy and PostgreSQL for database operations
- Redis for pub/sub messaging
- Arrow for date handling
- FastAPI for potential API endpoints
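Conceptually, the duplicate detection mentioned above matches incoming rows on a small set of key columns before inserting. A hedged sketch in plain SQLAlchemy terms (the repository uses its own `find_or_create` CRUD mixin, so this is illustrative only):

```python
# Hypothetical duplicate check, mirroring the matching columns used by the Writer Service.
from sqlalchemy import select


def record_exists(session, model, item: dict) -> bool:
    """Return True if a row with the same key columns already exists."""
    stmt = (
        select(model)
        .where(model.bank_date == item["bank_date"])
        .where(model.iban == item["iban"])
        .where(model.bank_reference_code == item["bank_reference_code"])
        .where(model.bank_balance == item["bank_balance"])
    )
    return session.execute(stmt).first() is not None
```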
@ -0,0 +1,187 @@
import time
import arrow
import json
from typing import Dict, Any

from BankServices.WriterService.model import BankReceive
from Schemas import AccountRecords, BuildIbans
from BankServices.ServiceDepends.config import Config

# Import Redis pub/sub handler
from Controllers.Redis.Broadcast.actions import redis_pubsub

# Define Redis channels
REDIS_CHANNEL_IN = "parser"  # Subscribe to Parser Service channel
REDIS_CHANNEL_OUT = "writer"  # Publish to Writer Service channel
delimiter = "|"


def publish_written_data_to_redis(data: Dict[str, Any], file_name: str) -> bool:
    """Publish written data status to Redis.

    Args:
        data: Original message data from Redis
        file_name: Name of the processed file

    Returns:
        bool: Success status
    """
    # Create a copy of the original message to preserve metadata
    message = data.copy() if isinstance(data, dict) else {}

    # Update stage to 'written'
    message["stage"] = "written"

    # Add processing timestamp
    message["written_at"] = str(arrow.now())

    # Publish to Redis channel
    result = redis_pubsub.publisher.publish(REDIS_CHANNEL_OUT, message)

    if result.status:
        print(f"[WRITER_SERVICE] Published written status for {file_name} with stage: written")
        return True
    else:
        print(f"[WRITER_SERVICE] Publish error: {result.error}")
        return False


def write_parsed_data_to_account_records(data_dict: dict, file_name: str) -> bool:
    """Write parsed data to the account records database.

    Args:
        data_dict: Parsed data dictionary
        file_name: Name of the file the data was imported from

    Returns:
        bool: True if a new record was created, False if it already exists or on error
    """
    try:
        with AccountRecords.new_session() as db_session:
            # Transform data for database
            data_dict["bank_balance"] = data_dict.pop("balance")
            data_dict["import_file_name"] = file_name
            data_dict = BankReceive(**data_dict).model_dump()
            print('data_dict', data_dict)

            # Process date fields
            bank_date = arrow.get(str(data_dict["bank_date"]))
            data_dict["bank_date_w"] = bank_date.weekday()
            data_dict["bank_date_m"] = bank_date.month
            data_dict["bank_date_d"] = bank_date.day
            data_dict["bank_date_y"] = bank_date.year
            data_dict["bank_date"] = str(bank_date)

            # Add build information if available
            if build_iban := BuildIbans.filter_by_one(
                iban=data_dict["iban"], db=db_session
            ).data:
                data_dict.update(
                    {
                        "build_id": build_iban.build_id,
                        "build_uu_id": build_iban.build_uu_id,
                    }
                )

            # Create new record or find existing one using specific fields for matching
            new_account_record = AccountRecords.find_or_create(
                db=db_session,
                **data_dict,
                include_args=[
                    AccountRecords.bank_date,
                    AccountRecords.iban,
                    AccountRecords.bank_reference_code,
                    AccountRecords.bank_balance
                ]
            )
            if new_account_record.meta_data.created:
                new_account_record.is_confirmed = True
                new_account_record.save(db=db_session)
                print(f"[WRITER_SERVICE] Created new record in database: {new_account_record.id}")
                return True
            else:
                print(f"[WRITER_SERVICE] Record already exists in database: {new_account_record.id}")
                return False
    except Exception as e:
        print(f"[WRITER_SERVICE] Error writing to database: {str(e)}")
        return False


def process_message(message):
    """Process a message from Redis.

    Args:
        message: Message data from Redis subscriber
    """
    # Extract the message data
    data = message["data"]

    # If data is a string, parse it as JSON
    if isinstance(data, str):
        try:
            data = json.loads(data)
        except json.JSONDecodeError as e:
            print(f"[WRITER_SERVICE] Error parsing message data: {e}")
            return

    # Check if stage is 'parsed' before processing
    if data.get("stage") == "parsed":
        try:
            file_name = data.get("filename")
            parsed_data = data.get("parsed")

            print(f"[WRITER_SERVICE] Processing file: {file_name}")

            if not parsed_data:
                print(f"[WRITER_SERVICE] No parsed data found for {file_name}")
                return

            # Process each parsed data item
            success = True
            for item in parsed_data:
                result = write_parsed_data_to_account_records(data_dict=item, file_name=file_name)
                if not result:
                    success = False

            # Publish status update to Redis if all records were processed
            if success:
                publish_written_data_to_redis(data=data, file_name=file_name)
        except Exception as e:
            print(f"[WRITER_SERVICE] Error processing message: {str(e)}")
    else:
        print(f"[WRITER_SERVICE] Skipped message with UUID: {data.get('uuid')} (stage is not 'parsed')")


def app():
    """Main application function."""
    print("[WRITER_SERVICE] Starting Writer Service")

    # Subscribe to the input channel
    result = redis_pubsub.subscriber.subscribe(REDIS_CHANNEL_IN, process_message)

    if result.status:
        print(f"[WRITER_SERVICE] Subscribed to channel: {REDIS_CHANNEL_IN}")
    else:
        print(f"[WRITER_SERVICE] Subscribe error: {result.error}")
        return

    # Start listening for messages
    listen_result = redis_pubsub.subscriber.start_listening(in_thread=True)

    if listen_result.status:
        print("[WRITER_SERVICE] Listening for messages")
    else:
        print(f"[WRITER_SERVICE] Error starting listener: {listen_result.error}")
        return


if __name__ == "__main__":
    # Initialize the app once
    app()

    # Keep the main thread alive
    try:
        while True:
            time.sleep(Config.EMAIL_SLEEP)
    except KeyboardInterrupt:
        print("\n[WRITER_SERVICE] Stopping service...")
        redis_pubsub.subscriber.stop_listening()
@ -0,0 +1,17 @@
from typing import Optional
from pydantic import BaseModel


class BankReceive(BaseModel):
    import_file_name: str
    iban: str
    bank_date: str
    channel_branch: str
    currency: Optional[str] = "TL"
    currency_value: float
    bank_balance: float
    additional_balance: float
    process_name: str
    process_type: str
    process_comment: str
    bank_reference_code: str
@ -0,0 +1,15 @@
[project]
name = "writerservice"
version = "0.1.0"
description = "Writer Service for bank email attachments using Redis pub/sub"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "arrow>=1.3.0",
    "fastapi>=0.115.11",
    "psycopg2-binary>=2.9.10",
    "redis>=5.0.1",
    "sqlalchemy-mixins>=2.0.5",
    "pydantic>=2.5.2",
    "pydantic-settings>=2.8.1",
]
@ -0,0 +1,62 @@
<!DOCTYPE html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>Gelen Banka Kayıtları</title>
    <style>
      body {
        font-family: Arial, sans-serif;
        margin: 20px;
      }
      table {
        width: 100%;
        border-collapse: collapse;
      }
      table,
      th,
      td {
        border: 1px solid black;
      }
      th,
      td {
        padding: 10px;
        text-align: left;
      }
      th {
        background-color: #f2f2f2;
      }
    </style>
  </head>
  <body>
    <h1>Günaydın, Admin</h1>
    <br />
    <p>Banka Kayıtları : {{today}}</p>
    <p><b>Son Bakiye : {{bank_balance}} </b></p>
    <p>
      <b>{{"Status : İkinci Bakiye Hatalı" if balance_error else "Status :OK"}}</b>
    </p>
    <table border="1">
      <thead>
        <tr>
          {% for header in headers %}
          <th>{{ header }}</th>
          {% endfor %}
        </tr>
      </thead>
      <tbody>
        {% for row in rows %}
        <tr>
          {% for cell in row %}
          <td>{{ cell }}</td>
          {% endfor %}
        </tr>
        {% endfor %}
      </tbody>
    </table>
    <p>Teşekkür ederiz,<br />Evyos Yönetim<br />Saygılarımızla</p>
  </body>
</html>
@ -10,19 +10,18 @@ class Configs(BaseSettings):
     USERNAME: str = ""
     PASSWORD: str = ""
     PORT: int = 0
-    SEND: bool = False
+    SEND: bool = 0
 
     @property
     def is_send(self):
         return bool(self.SEND)
 
-    @classmethod
-    def as_dict(cls):
+    def as_dict(self):
         return dict(
-            host=cls.EMAIL_HOST,
-            port=cls.EMAIL_PORT,
-            username=cls.EMAIL_USERNAME,
-            password=cls.EMAIL_PASSWORD,
+            host=self.HOST,
+            port=self.PORT,
+            username=self.USERNAME,
+            password=self.PASSWORD,
         )
 
     model_config = SettingsConfigDict(env_prefix="EMAIL_")
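The hunk above drops the `EMAIL_`-prefixed attribute names because pydantic-settings already applies that prefix via `env_prefix="EMAIL_"`, so the field `HOST` is populated from the `EMAIL_HOST` variable. A self-contained sketch of that mechanism, re-created here with dummy environment values rather than the project's actual module:

```python
import os

from pydantic_settings import BaseSettings, SettingsConfigDict

# Dummy values for illustration only.
os.environ.update({"EMAIL_HOST": "10.10.2.34", "EMAIL_PORT": "587", "EMAIL_SEND": "1"})


class Configs(BaseSettings):
    HOST: str = ""
    USERNAME: str = ""
    PASSWORD: str = ""
    PORT: int = 0
    SEND: bool = 0

    @property
    def is_send(self):
        return bool(self.SEND)

    def as_dict(self):
        # Exactly the keyword arguments EmailSender(...) expects
        return dict(host=self.HOST, port=self.PORT, username=self.USERNAME, password=self.PASSWORD)

    model_config = SettingsConfigDict(env_prefix="EMAIL_")


email_configs = Configs()
print(email_configs.as_dict())  # {'host': '10.10.2.34', 'port': 587, 'username': '', 'password': ''}
print(email_configs.is_send)    # True, because EMAIL_SEND=1
```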
@ -1,8 +1,8 @@
 from redmail import EmailSender
 from typing import List, Optional, Dict
 from pydantic import BaseModel
-from config import Configs
 from contextlib import contextmanager
+from .config import email_configs
 
 
 class EmailSendModel(BaseModel):

@ -23,11 +23,10 @@ class EmailSession:
 
     def send(self, params: EmailSendModel) -> bool:
         """Send email using this session."""
-        if not Configs.is_send:
+        if not email_configs.is_send:
             print("Email sending is disabled", params)
             return False
 
-        receivers = [Configs.USERNAME]
+        receivers = [email_configs.USERNAME]
         self.email_sender.send(
             subject=params.subject,
             receivers=receivers,

@ -53,7 +52,7 @@ class EmailService:
     @contextmanager
     def new_session(cls):
         """Create and yield a new email session with active connection."""
-        email_sender = EmailSender(**Configs.as_dict())
+        email_sender = EmailSender(**email_configs.as_dict())
         session = EmailSession(email_sender)
         try:
             email_sender.connect()
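For context, a sketch of how the refactored service is presumably consumed by callers. It assumes `new_session` is exposed as a classmethod (the `cls` parameter suggests so), and the `EmailSendModel` field names beyond `subject` are guesses, since only `subject` appears in these hunks.

```python
# Hypothetical caller, placed in the same module as EmailService for illustration.
rendered_html = "<p>Günlük banka raporu</p>"  # e.g. the Jinja template above, already rendered

params = EmailSendModel(
    subject="Günlük Banka Raporu",
    html_body=rendered_html,  # field name is a guess; only `subject` is visible in the diff
)

with EmailService.new_session() as session:
    if not session.send(params):
        print("Email sending is disabled or the send was skipped")
```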
@ -216,6 +216,7 @@ class CRUDModel:
         cls,
         db: Session,
         exclude_args: Optional[list[InstrumentedAttribute]] = None,
+        include_args: Optional[list[InstrumentedAttribute]] = None,
         **kwargs,
     ):
         """

@ -224,6 +225,7 @@ class CRUDModel:
         Args:
             db: Database session
             exclude_args: Keys to exclude from search
+            include_args: Keys to specifically include in search (if provided, only these will be used)
             **kwargs: Search/creation criteria
 
         Returns:

@ -239,8 +241,16 @@ class CRUDModel:
         exclude_args = exclude_args or []
         exclude_args = [exclude_arg.key for exclude_arg in exclude_args]
 
+        include_args = include_args or []
+        include_args = [include_arg.key for include_arg in include_args]
+
+        # If include_args is provided, only use those fields for matching
+        # Otherwise, use all fields except those in exclude_args
         for key, value in kwargs.items():
-            if hasattr(cls, key) and key not in exclude_args:
+            if hasattr(cls, key):
+                if include_args and key in include_args:
+                    query = query.filter(getattr(cls, key) == value)
+                elif not include_args and key not in exclude_args:
                     query = query.filter(getattr(cls, key) == value)
 
         already_record = query.first()
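A hedged illustration of the new `include_args` behaviour. The model, the session object, and the method name (`get_or_create`) are assumptions here, since the enclosing method's name is not visible in these hunks.

```python
# Illustrative only: `BankRecord` and `db` are stand-ins, and the surrounding
# get-or-create style classmethod on CRUDModel is assumed.
record = BankRecord.get_or_create(
    db,
    # Match existing rows on the reference code only...
    include_args=[BankRecord.bank_reference_code],
    # ...while still passing the full payload for creation if no row matches.
    bank_reference_code="REF-123456",
    bank_balance=98450.10,
    process_name="EFT",
)
```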
@ -0,0 +1,128 @@
services:
  email_service:
    container_name: email_service
    build:
      context: .
      dockerfile: BankServices/EmailService/Dockerfile
    networks:
      - bank-services-network
    environment:
      - MAILBOX=bilgilendirme@ileti.isbank.com.tr
      - MAIN_MAIL=karatay.berkay@gmail.com
      - INFO_MAIL=mehmet.karatay@hotmail.com
      - EMAIL_HOST=10.10.2.34
      - EMAIL_USERNAME=isbank@mehmetkaratay.com.tr
      - EMAIL_PASSWORD=system
      - EMAIL_PORT=993
      - EMAIL_SEND_PORT=587
      - EMAIL_SLEEP=60
      - AUTHORIZE_IBAN=4245-0093333
      - REDIS_HOST=10.10.2.15
      - REDIS_PORT=6379
      - REDIS_PASSWORD=your_strong_password_here
    restart: unless-stopped
    volumes:
      - tempory-email-service:/tmp
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

  parser_service:
    container_name: parser_service
    build:
      context: .
      dockerfile: BankServices/ParserService/Dockerfile
    networks:
      - bank-services-network
    environment:
      - REDIS_HOST=10.10.2.15
      - REDIS_PORT=6379
      - REDIS_PASSWORD=your_strong_password_here
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

  writer_service:
    container_name: writer_service
    build:
      context: .
      dockerfile: BankServices/WriterService/Dockerfile
    networks:
      - bank-services-network
    environment:
      - REDIS_HOST=10.10.2.15
      - REDIS_PORT=6379
      - REDIS_PASSWORD=your_strong_password_here
      - DB_HOST=10.10.2.14
      - DB_PORT=5432
      - DB_USER=postgres
      - DB_PASSWORD=password
      - DB_NAME=postgres
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

  routine_email_service:
    container_name: routine_email_service
    build:
      context: .
      dockerfile: BankServices/RoutineEmailService/Dockerfile
    networks:
      - bank-services-network
    environment:
      - EMAIL_HOST=10.10.2.34
      - EMAIL_USERNAME=karatay@mehmetkaratay.com.tr
      - EMAIL_PASSWORD=system
      - EMAIL_PORT=587
      - EMAIL_SEND=1
      - DB_HOST=10.10.2.14
      - DB_PORT=5432
      - DB_USER=postgres
      - DB_PASSWORD=password
      - DB_NAME=postgres
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

  sender_service:
    container_name: sender_service
    build:
      context: .
      dockerfile: BankServices/SenderService/Dockerfile
    networks:
      - bank-services-network
    environment:
      - EMAIL_HOST=10.10.2.34
      - EMAIL_USERNAME=karatay@mehmetkaratay.com.tr
      - EMAIL_PASSWORD=system
      - EMAIL_PORT=587
      - EMAIL_SEND=1
      - DB_HOST=10.10.2.14
      - DB_PORT=5432
      - DB_USER=postgres
      - DB_PASSWORD=password
      - DB_NAME=postgres
    restart: unless-stopped
    logging:
      driver: "json-file"
      options:
        max-size: "10m"
        max-file: "3"

networks:
  bank-services-network:
    driver: bridge

volumes:
  tempory-email-service:
@ -0,0 +1,26 @@
#!/bin/bash

# Simple script to restart bank services
# Created: 2025-04-20

# Set the working directory to the script's location
cd "$(dirname "$0")"
echo "Working directory: $(pwd)"

# Configuration
COMPOSE_FILE="bank-services-docker-compose.yml"

# Check if the compose file exists
if [ ! -f "$COMPOSE_FILE" ]; then
    echo "ERROR: Compose file $COMPOSE_FILE not found in $(pwd)"
    exit 1
fi

echo "$(date '+%Y-%m-%d %H:%M:%S') Stopping any running bank services..."
docker compose -f "$COMPOSE_FILE" down

echo "$(date '+%Y-%m-%d %H:%M:%S') Rebuilding and starting bank services..."
docker compose -f "$COMPOSE_FILE" up --build -d

echo "$(date '+%Y-%m-%d %H:%M:%S') Bank services restart completed"
exit 0