Compare commits

..

2 Commits

Author  SHA1        Message        Date
Ryann   c53c0f8814  lots of fixes  2025-03-23 13:15:57 +00:00
Ryann   8cb43b05a8  lots of fixes  2025-03-23 13:14:14 +00:00
34 changed files with 687 additions and 81 deletions

3
.gitignore vendored
View File

@ -4,4 +4,5 @@ config
logs
api/.env
api/node_modules
certs
certs
backups

View File

@ -103,13 +103,14 @@ app.use((err, req, res, next) => {
});
const cron = require('node-cron');
const { cleanupOrphanedMailboxes, cleanupUnmatchedAndExpired, cleanupInactiveMailboxes } = require('./scripts/cleanup');
const { cleanupOrphanedMailboxes, cleanupUnmatchedAndExpired, cleanupInactiveMailboxes, cleanupInactiveUsers } = require('./scripts/cleanup');
cron.schedule('0 0 * * *', async () => {
console.log('Running mailbox cleanup job');
try {
await cleanupOrphanedMailboxes();
await cleanupUnmatchedAndExpired();
await cleanupInactiveMailboxes();
await cleanupInactiveUsers();
console.log('Mailbox cleanup completed successfully');
} catch (error) {
console.error('Mailbox cleanup failed:', error);

View File

@ -1,17 +1,19 @@
const path = require('path');
require('dotenv').config();
const { models } = require(path.resolve(process.env.ROOT_PATH, './db/db.js'));
const BaseController = require(path.resolve(process.env.ROOT_PATH, './controllers/BaseController.js'));
const jwt = require('jsonwebtoken');
const bcrypt = require('bcrypt');
const crypto = require('crypto');
const { format, differenceInHours, addHours } = require('date-fns');
const sendMail = require('../utils/sendMail.js');
const HaveIBeenPwnedAPI = require('../utils/hibp.js');
class AuthController extends BaseController {
constructor() {
super();
this.protected('me', 'refreshToken', 'login');
this.admin('register');
}
/**
@ -60,8 +62,16 @@ class AuthController extends BaseController {
if (!passwordMatch) {
return res.status(401).json({ error: 'Invalid username or password' });
}
const token = jwt.sign({ id: user.id, username: user.username, is_admin: user.is_admin, email: user.email }, process.env.JWT_SECRET, { expiresIn: '6h' });
res.json({ api_key: token });
// jwt.verify throws on a missing, expired, or malformed token rather than returning a falsy value,
// so wrap it to fall through to issuing a fresh token.
let validToken = false;
try {
validToken = Boolean(jwt.verify(user.api_key, process.env.JWT_SECRET));
} catch (err) {
validToken = false;
}
if (!validToken) {
const token = jwt.sign({ id: user.id, username: user.username, is_admin: user.is_admin, email: user.email }, process.env.JWT_SECRET, { expiresIn: '6h' });
await models.User.query().update({ api_key: token, last_login: format(new Date(), 'yyyy-MM-dd HH:mm:ss') }).where('id', user.id);
return res.json({ api_key: token });
}
await models.User.query().update({ last_login: format(new Date(), 'yyyy-MM-dd HH:mm:ss') }).where('id', user.id);
res.json({ api_key: user.api_key });
}
/**
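For reference, a minimal client-side sketch of the reworked login flow above: the handler now reuses the stored api_key while it is still valid and only signs a fresh 6-hour token otherwise. The base URL matches the one used elsewhere in this changeset, but the POST /auth/login mount point is an assumption, since the API's auth routes file is not part of this diff.

// Hypothetical usage sketch - the /auth/login path is assumed, not shown in this diff.
const axios = require('axios');

async function getApiKey(username, password) {
  // On success the handler responds with { api_key }; bad credentials yield a 401 with { error }.
  const response = await axios.post('https://api.2weekmail.fyi/auth/login', { username, password });
  return response.data.api_key;
}

getApiKey('alice', 'correct horse battery staple')
  .then(apiKey => console.log('api_key:', apiKey))
  .catch(err => console.error('Login failed:', err.response ? err.response.data : err.message));
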
@ -159,29 +169,66 @@ class AuthController extends BaseController {
}
async register(req, res) {
console.log(req.body);
const { username, email, password } = req.body;
const hibp = new HaveIBeenPwnedAPI();
try {
const checkBreached = await hibp.checkPassword(password);
if (checkBreached.isCompromised) {
return res.status(400).json({ error: 'Password is compromised. Checked against haveibeenpwned.com' });
}
const validatePassword = await hibp.validatePassword(password, { maxExposures: 0 });
if (!validatePassword.isValid) {
return res.status(400).json({ error: 'Password is not valid. Checked against haveibeenpwned.com' });
}
} catch (error) {
console.error(error);
return res.status(500).json({ error: 'Failed to check password' });
}
const dupUser = await models.User.query().where('username', username).orWhere('email', email).first();
if (dupUser) {
return res.status(400).json({ error: 'Username or email already exists' });
}
const invite_token = crypto.randomBytes(32).toString('hex');
const salt = await bcrypt.genSalt(10);
const passwordEncrypted = await bcrypt.hash(password, salt);
const newEmail = `${username}@${crypto.randomBytes(10).toString('hex')}.com`;
const user = await models.User.query().insert({
username,
password: passwordEncrypted,
email,
email: newEmail,
is_admin: 0,
is_active: 0,
created: format(new Date(), 'yyyy-MM-dd HH:mm:ss'),
modified: format(new Date(), 'yyyy-MM-dd HH:mm:ss')
modified: format(new Date(), 'yyyy-MM-dd HH:mm:ss'),
last_login: format(new Date(), 'yyyy-MM-dd HH:mm:ss')
});
await models.Invite.query().insert({
user_id: user.id,
token: invite_token,
expires_at: format(addHours(new Date(), 12), 'yyyy-MM-dd HH:mm:ss')
expires: format(addHours(new Date(), 12), 'yyyy-MM-dd HH:mm:ss'),
created: format(new Date(), 'yyyy-MM-dd HH:mm:ss')
});
await sendMail(
email,
'2weekmail - Invite Code',
`Welcome to the 2weekmail. Please use the link below to activate your account.
<br>
<a href="https://2weekmail.fyi/auth/activate/${invite_token}">activate your account</a>`,
`Welcome to 2weekmail. Please use this link to <a href="https://2weekmail.fyi/auth/activate/${invite_token}">activate your account</a>.`
);
res.json({
message: 'User registered successfully',
invite_token
message: 'User registered successfully'
});
}
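A hedged sketch of what the reworked register handler now expects and returns, assuming it stays mounted at POST /auth/register (an assumption, as the API's auth routes are not in this diff): the password is screened against haveibeenpwned before anything is written, the submitted email is only used to deliver the invite link (the stored address is replaced with a random placeholder), and the invite token is no longer echoed back in the response.

// Illustrative only - the exact mount point and base URL of this controller are assumptions.
const axios = require('axios');

async function registerUser() {
  try {
    const { data } = await axios.post('https://api.2weekmail.fyi/auth/register', {
      username: 'alice',
      email: 'alice@example.com',          // used once to deliver the invite link, then discarded
      password: 'a-long-unbreached-pass'   // rejected with a 400 if it appears in the HIBP dataset
    });
    console.log(data.message); // "User registered successfully" - no invite_token in the body any more
  } catch (err) {
    // 400: compromised password or duplicate username/email; 500: the HIBP lookup itself failed
    console.error(err.response ? err.response.data.error : err.message);
  }
}

registerUser();
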
@ -190,7 +237,7 @@ class AuthController extends BaseController {
}
async activate(req, res) {
const { invite_token } = req.body;
const invite_token = req.params.token;
const invite = await models.Invite.query().where('token', invite_token).first();
if (!invite) {
return res.status(400).json({ error: 'Invalid invite token' });
@ -207,12 +254,15 @@ class AuthController extends BaseController {
await models.User.query().update({
is_active: 1,
modified: format(new Date(), 'yyyy-MM-dd HH:mm:ss')
modified: format(new Date(), 'yyyy-MM-dd HH:mm:ss'),
last_login: format(new Date(), 'yyyy-MM-dd HH:mm:ss')
}).where('id', user.id);
res.json({
message: 'User activated successfully',
invite_token
await models.Invite.query().delete().where('id', invite.id);
return res.json({
success: true,
message: 'User activated successfully'
});
}
}

View File

@ -94,7 +94,6 @@ class MailboxController extends BaseController {
const mailData = {
id: mailbox.id,
username: mailbox.username,
password: password.password,
expires: mailbox.expires
}

View File

@ -0,0 +1,20 @@
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
exports.up = function(knex) {
return knex.schema.alterTable('users', (table) => {
table.tinyint('ignore').defaultTo(0);
table.datetime('last_login').nullable();
});
};
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
exports.down = function(knex) {
return knex.schema.alterTable('users', (table) => {
table.dropColumn('ignore');
table.dropColumn('last_login');
});
};
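For completeness, a short sketch of applying and rolling back this migration programmatically; the knexfile.js path and environment name are assumptions (the knex CLI does the same with migrate:latest / migrate:rollback).

// Hypothetical runner - knexfile.js and the 'development' environment are assumptions.
const knex = require('knex')(require('./knexfile.js').development);

async function run() {
  // Applies pending migrations, including the users.ignore and users.last_login columns above.
  await knex.migrate.latest();
  // To undo the latest batch (drops both columns via the down() above):
  // await knex.migrate.rollback();
  await knex.destroy();
}

run().catch(console.error);
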

View File

@ -13,14 +13,14 @@ class Invite extends BaseModel {
static get jsonSchema() {
return {
type: 'object',
required: ['user_id', 'token', 'expires_at'],
required: ['user_id', 'token', 'expires'],
properties: {
id: { type: 'integer' },
user_id: { type: 'string', minLength: 1, maxLength: 255 },
user_id: { type: 'integer' },
token: { type: 'string', minLength: 1 },
expires_at: { type: 'string', format: 'date-time' },
created_at: { type: 'string', format: 'date-time' },
expires: { type: 'string', format: 'date-time' },
created: { type: 'string', format: 'date-time' },
}
};
}

View File

@ -12,17 +12,17 @@ class User extends BaseModel {
static get jsonSchema() {
return {
type: 'object',
required: ['username', 'password', 'email'],
properties: {
id: { type: 'integer' },
username: { type: 'string', minLength: 1, maxLength: 255 },
password: { type: 'string', minLength: 1 },
email: { type: 'string', minLength: 1, maxLength: 255 },
is_admin: { type: 'boolean', default: false },
is_active: { type: 'boolean', default: true },
is_admin: { type: 'integer', default: 0 },
is_active: { type: 'integer', default: 0 },
api_key: { type: ['string', 'null'] },
created: { type: 'string', format: 'date-time' },
modified: { type: 'string', format: 'date-time' }
modified: { type: 'string', format: 'date-time' },
ignore: { type: 'integer', default: 0 }
}
};
}

View File

@ -5,8 +5,10 @@ const { format } = require("date-fns");
* @returns { Promise<void> }
*/
exports.seed = async function(knex) {
// Deletes ALL existing entries
await knex('users').del()
const user = await knex('users').where('username', 'admin').first();
if (user) {
await knex('users').where('id', user.id).del();
}
await knex('users').insert([
{
username: 'admin',

1
api/public/css/sweetalert2.min.css vendored Normal file

File diff suppressed because one or more lines are too long

BIN
api/public/img/favicon.ico Normal file

Binary file not shown.

After: Size 4.2 KiB

BIN
api/public/img/favicon.png Normal file

Binary file not shown.

After: Size 3.0 KiB

File diff suppressed because one or more lines are too long

View File

@ -3,8 +3,8 @@ const AuthController = require(path.resolve(process.env.ROOT_PATH, './controller
const router = require('express').Router();
router.post('/register', AuthController.register);
router.post('/activate', AuthController.activate);
router.get('/activate/:token', AuthController.activateView);
router.post('/activate/:token', AuthController.activate);
router.get('/register', AuthController.registerView);
router.get('/activate', AuthController.activateView);
module.exports = router;

View File

@ -1,8 +1,8 @@
const fs = require('fs').promises;
const path = require('path');
const { models } = require('../db/db');
const { Mailbox, Alias } = models;
const { format, isBefore } = require('date-fns');
const { Mailbox, Alias, User } = models;
const { format, isBefore, subDays } = require('date-fns');
async function scanMailboxDirectories(callback) {
const mailPath = '/var/mail';
@ -192,9 +192,31 @@ async function cleanupInactiveMailboxes() {
}
}
async function cleanupInactiveUsers() {
try {
const cutoffDate = format(subDays(new Date(), 30), 'yyyy-MM-dd HH:mm:ss');
// .delete().returning() only yields rows on PostgreSQL; on MySQL/MariaDB it resolves to the
// affected-row count, so select the stale users first and then delete them by id.
const staleUsers = await User.query()
.where('ignore', 0)
.where('last_login', '<', cutoffDate);
staleUsers.forEach(user => {
console.log(`Removing inactive user: ${user.username}`);
});
const removed = await User.query()
.delete()
.whereIn('id', staleUsers.map(user => user.id));
console.log(`Cleanup of inactive users completed - removed ${removed} users`);
} catch (error) {
console.error('Error during cleanup:', error);
}
}
module.exports = {
cleanupOrphanedMailboxes,
cleanupUnmatchedAndExpired,
cleanupInactiveMailboxes
cleanupInactiveMailboxes,
cleanupInactiveUsers
};
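Outside the nightly cron job, the new cleanup can also be exercised by hand; a one-off runner might look like this (the relative path assumes it is run from the api directory).

// Manual one-off run of the new inactive-user cleanup, e.g. for testing the 30-day rule.
const { cleanupInactiveUsers } = require('./scripts/cleanup');

cleanupInactiveUsers()
  .then(() => process.exit(0))
  .catch(err => {
    console.error(err);
    process.exit(1);
  });
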

View File

@ -1,6 +1,60 @@
const { models } = require('../db/db.js');
const nodemailer = require('nodemailer');
require('dotenv').config();
(async () => {
const domains = await models.Domain.query();
console.log(domains);
})();
// Create a transport configuration for Postfix
const transport = nodemailer.createTransport({
host: '172.18.0.3', // Postfix container IP on the Docker network
// or host: 'mail', // use the docker-compose service name instead of the hard-coded IP
port: 587, // Default SMTP port
secure: false, // TLS is not required for local Postfix
auth: {
user: process.env.SMTP_USER, // Add your SMTP username
pass: process.env.SMTP_PASS // Add your SMTP password
},
tls: {
rejectUnauthorized: false // Allow self-signed certificates
}
});
/**
* Sends an email using the configured Postfix transport
* @param {string} to - Recipient email address
* @param {string} subject - Email subject
* @param {string} text - Plain text email content
* @param {string} html - HTML email content (optional)
* @returns {Promise} Result of sending the email
*/
async function sendEmail(to, subject, text, html) {
try {
const mailOptions = {
from: process.env.SMTP_FROM, // Replace with your sender email
to: to,
subject: subject,
text: text,
html: html || text // Use HTML if provided, otherwise use plain text
};
const info = await transport.sendMail(mailOptions);
console.log('Email sent successfully:', info.messageId);
return info;
} catch (error) {
console.error('Error sending email:', error);
throw error;
}
}
// Example usage
async function main() {
try {
await sendEmail(
'ryancarr10@gmail.com',
'Test Email',
'This is a test email from Postfix',
'<h1>This is a test email from Postfix</h1>'
);
} catch (error) {
console.error('Main error:', error);
}
}
main();

View File

92
api/utils/hibp.js Normal file
View File

@ -0,0 +1,92 @@
const crypto = require('crypto');
const axios = require('axios');
class HaveIBeenPwnedAPI {
constructor(config = {}) {
this.baseUrl = 'https://api.pwnedpasswords.com';
this.userAgent = config.userAgent || 'PasswordSecurityChecker';
}
/**
* Generates a SHA-1 hash of the password
* @param {string} password - The password to hash
* @returns {string} The uppercase SHA-1 hash
*/
generateHash(password) {
return crypto
.createHash('sha1')
.update(password)
.digest('hex')
.toUpperCase();
}
/**
* Checks if a password has been exposed in known data breaches
* @param {string} password - The password to check
* @returns {Promise<{isCompromised: boolean, timesExposed: number}>}
*/
async checkPassword(password) {
try {
// Generate hash and get first 5 characters for k-anonymity
const hash = this.generateHash(password);
const hashPrefix = hash.substring(0, 5);
const hashSuffix = hash.substring(5);
// Make request to HIBP API
const response = await axios.get(`${this.baseUrl}/range/${hashPrefix}`, {
headers: {
'User-Agent': this.userAgent
}
});
// Parse response and check if password hash suffix exists
const hashes = response.data.split('\n');
const match = hashes.find(h => h.split(':')[0] === hashSuffix);
if (match) {
const timesExposed = parseInt(match.split(':')[1]);
return {
isCompromised: true,
timesExposed
};
}
return {
isCompromised: false,
timesExposed: 0
};
} catch (error) {
throw new Error(`Failed to check password: ${error.message}`);
}
}
/**
* Validates a password against HIBP and custom rules
* @param {string} password - The password to validate
* @param {Object} options - Validation options
* @param {number} options.maxExposures - Maximum allowed exposures (default: 0)
* @returns {Promise<{isValid: boolean, reason?: string}>}
*/
async validatePassword(password, options = { maxExposures: 0 }) {
try {
const result = await this.checkPassword(password);
if (result.timesExposed > options.maxExposures) {
return {
isValid: false,
reason: `Password has been exposed ${result.timesExposed} times in data breaches`
};
}
return {
isValid: true
};
} catch (error) {
throw new Error(`Password validation failed: ${error.message}`);
}
}
}
module.exports = HaveIBeenPwnedAPI;
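A standalone usage sketch of the helper above, mirroring how AuthController.register calls it; only the first five characters of the SHA-1 hash are ever sent to the range endpoint (k-anonymity).

// Usage sketch for the HaveIBeenPwnedAPI class defined above.
const HaveIBeenPwnedAPI = require('./hibp.js');

(async () => {
  const hibp = new HaveIBeenPwnedAPI({ userAgent: '2weekmail-registration' });

  const check = await hibp.checkPassword('password123');
  console.log(check); // e.g. { isCompromised: true, timesExposed: <count> }

  const validation = await hibp.validatePassword('password123', { maxExposures: 0 });
  if (!validation.isValid) {
    console.log(validation.reason); // "Password has been exposed N times in data breaches"
  }
})();
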

46
api/utils/sendMail.js Normal file
View File

@ -0,0 +1,46 @@
const nodemailer = require('nodemailer');
require('dotenv').config();
// Create a transport configuration for Postfix
const transport = nodemailer.createTransport({
host: '172.18.0.3', // Postfix container IP on the Docker network
// or host: 'mail', // use the docker-compose service name instead of the hard-coded IP
port: 587, // Default SMTP port
secure: false, // TLS is not required for local Postfix
auth: {
user: process.env.SMTP_USER, // Add your SMTP username
pass: process.env.SMTP_PASS // Add your SMTP password
},
tls: {
rejectUnauthorized: false // Allow self-signed certificates
}
});
/**
* Sends an email using the configured Postfix transport
* @param {string} to - Recipient email address
* @param {string} subject - Email subject
* @param {string} text - Plain text email content
* @param {string} html - HTML email content (optional)
* @returns {Promise} Result of sending the email
*/
async function sendMail(to, subject, text, html) {
try {
const mailOptions = {
from: process.env.SMTP_FROM,
to: to,
subject: subject,
text: text,
html: html || text
};
const info = await transport.sendMail(mailOptions);
console.log('Email sent successfully:', info.messageId);
return info;
} catch (error) {
console.error('Error sending email:', error);
throw error;
}
}
module.exports = sendMail;
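A short usage sketch for the helper above, assuming SMTP_USER, SMTP_PASS and SMTP_FROM are set in .env; this mirrors how AuthController.register sends the invite email.

// Usage sketch for the sendMail helper defined above.
const sendMail = require('./sendMail.js');

sendMail(
  'someone@example.com',
  'Hello from 2weekmail',
  'Plain-text fallback body',
  '<p>Optional <strong>HTML</strong> body; the text argument is used when this is omitted.</p>'
)
  .then(info => console.log('Queued with id', info.messageId))
  .catch(err => console.error('Send failed:', err.message));
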

View File

@ -176,15 +176,15 @@
</div>
<div class="col-md-3 col-6 mb-3">
<i class="fas fa-database fa-3x text-primary mb-2"></i>
<p>MongoDB</p>
<p>MySQL (knex.js & objection.js)</p>
</div>
<div class="col-md-3 col-6 mb-3">
<i class="fas fa-lock fa-3x text-primary mb-2"></i>
<p>Encryption</p>
<p>Encryption (bcrypt, jwt)</p>
</div>
<div class="col-md-3 col-6 mb-3">
<i class="fas fa-server fa-3x text-primary mb-2"></i>
<p>Cloud Hosting</p>
<p>Cloud Hosting (Cherry Servers)</p>
</div>
</div>
</div>

View File

@ -15,7 +15,7 @@
<div id="success" class="d-none">
<i class="fas fa-check-circle text-success fa-5x mb-3"></i>
<h4 class="mb-3">Account Successfully Activated!</h4>
<p>Your API key has been generated and sent to your email address.</p>
<p>Use your username and password to log in to the API and retrieve your API key.</p>
<div class="alert alert-info mt-4">
<p class="mb-0"><strong>Important:</strong> Keep your API key secure and do not share it with others.</p>
</div>
@ -33,22 +33,33 @@
</div>
</div>
{{#section 'scripts'}}
<script>
// Simulate activation process
document.addEventListener('DOMContentLoaded', function() {
// Get activation token from URL
const urlParams = new URLSearchParams(window.location.search);
const token = urlParams.get('token');
const token = window.location.pathname.split('/activate/')[1];
// This would be replaced with an actual API call
setTimeout(() => {
document.getElementById('loading').classList.add('d-none');
if (token) {
document.getElementById('success').classList.remove('d-none');
} else {
document.getElementById('error').classList.remove('d-none');
}
}, 2000);
if (token) {
$.ajax({
url: `/auth/activate/${token}`,
method: 'POST',
success: function(response) {
$('#loading').addClass('d-none');
$('#success').removeClass('d-none');
},
error: function(xhr, status, error) {
console.error('Activation error:', error);
$('#loading').addClass('d-none');
$('#error').removeClass('d-none');
if (xhr.responseJSON && xhr.responseJSON.error) {
$('#error p').text(xhr.responseJSON.error);
}
}
});
} else {
$('#loading').addClass('d-none');
$('#error').removeClass('d-none');
}
});
</script>
{{/section}}

View File

@ -5,27 +5,29 @@
<h3 class="mb-0">Register for API Access</h3>
</div>
<div class="card-body">
<form id="registerForm" action="/auth/register" method="POST">
<form id="registerForm">
<div class="mb-3">
<label for="email" class="form-label">Email address</label>
<input type="email" class="form-control" id="email" name="email" required>
<div class="form-text">We'll send a verification link to this email.</div>
<div class="form-text">We'll send a verification link to this email.
<p>Your email will be deleted after verification.<br>
<b>Remember your password! No resets will be allowed.</b></p>
</div>
</div>
<div class="mb-3">
<label for="name" class="form-label">Full Name</label>
<input type="text" class="form-control" id="name" name="name" required>
<label for="username" class="form-label">Username</label>
<input type="text" class="form-control" id="username" name="username" required>
</div>
<div class="mb-3">
<label for="organization" class="form-label">Organization (Optional)</label>
<input type="text" class="form-control" id="organization" name="organization">
<label for="password" class="form-label">Password</label>
<input type="password" class="form-control" id="password" name="password" required>
</div>
<div class="mb-3">
<label for="usage" class="form-label">Intended Usage</label>
<textarea class="form-control" id="usage" name="usage" rows="3" required></textarea>
<div class="form-text">Briefly describe how you plan to use our API.</div>
<label for="confirm_password" class="form-label">Confirm Password</label>
<input type="password" class="form-control" id="confirm_password" name="confirm_password" required>
</div>
<div class="mb-3 form-check">
@ -33,9 +35,65 @@
<label class="form-check-label" for="terms">I agree to the <a href="/terms">Terms of Service</a> and <a href="/privacy">Privacy Policy</a></label>
</div>
<button type="submit" class="btn btn-primary">Register</button>
<button type="submit" class="btn btn-primary" id="registerButton">Register</button>
</form>
</div>
</div>
</div>
</div>
{{#section 'scripts'}}
<script>
$('#registerButton').click(function(e) {
e.preventDefault();
const email = $('#email').val();
const username = $('#username').val();
const password = $('#password').val();
const confirm_password = $('#confirm_password').val();
const terms = $('#terms').is(':checked');
if (password !== confirm_password) {
Swal.fire({
title: 'Error',
text: 'Passwords do not match',
icon: 'error'
});
return;
}
if (!terms) {
Swal.fire({
title: 'Error',
text: 'You must agree to the Terms of Service and Privacy Policy',
icon: 'error'
});
return;
}
console.log(email, username, password);
$.ajax({
url: '/auth/register',
type: 'POST',
data: { email, username, password },
success: function(response) {
// SweetAlert2 escapes the text option, so use html here to render the <br> tags
Swal.fire({
title: 'Success',
html: `You have been registered successfully!<br>
Your invite code is only good for 12 hours.
<br>Please check your email for verification.
<br>Check your spam folder if you don't see it in your inbox.`,
icon: 'success'
});
},
error: function(xhr) {
const errorMessage = xhr.responseJSON?.error || 'An error occurred while registering. Please try again.';
Swal.fire({
title: 'Error',
text: errorMessage,
icon: 'error'
});
}
});
});
</script>
{{/section}}

View File

@ -2,7 +2,7 @@
<h1 class="display-4 fw-bold">2weekmail</h1>
<p class="lead">Temporary email service that lasts for two weeks</p>
<div class="d-flex justify-content-center mt-4">
<a href="/api-docs" class="btn btn-primary btn-lg me-3">
<a href="/auth/register" class="btn btn-primary btn-lg me-3">
<i class="fas fa-code me-2"></i>Get API Key
</a>
<a href="#features" class="btn btn-outline-light btn-lg">

View File

@ -16,5 +16,7 @@
<script src="/js/bootstrap.bundle.min.js"></script>
<script src="/js/all.min.js"></script>
<script src="/js/chart.min.js"></script>
<script src="/js/sweetalert2.js"></script>
{{{_sections.scripts}}}
</body>
</html>

View File

@ -1,11 +1,20 @@
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="2weekmail is a temporary email service that allows you to send and receive emails for a limited time.">
<meta name="keywords" content="temporary email, disposable email, temporary email service, disposable email service, temporary email address, disposable email address, temporary email service, disposable email service, temporary email address, disposable email address">
<link rel="icon" href="/img/favicon.ico" type="image/x-icon">
<meta name="author" content="2weekmail">
<meta name="robots" content="index, follow">
<meta name="googlebot" content="index, follow">
<meta name="bingbot" content="index, follow">
<meta name="yandexbot" content="index, follow">
<title>2weekmail - Temporary Email Service</title>
<!-- Bootstrap 5 CSS -->
<link href="/css/bootstrap.min.css" rel="stylesheet">
<!-- Font Awesome for icons -->
<link rel="stylesheet" href="/css/all.min.css">
<link rel="stylesheet" href="/css/sweetalert2.min.css">
<style>
body {
min-height: 100vh;

View File

@ -21,6 +21,9 @@
<li class="nav-item">
<a class="nav-link" href="/about"><i class="fas fa-info-circle me-1"></i> About</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/auth/register"><i class="fas fa-user-plus me-1"></i> Register</a>
</li>
</ul>
</div>
</div>

View File

@ -116,9 +116,9 @@
document.addEventListener('DOMContentLoaded', function() {
// Fetch all stats data
Promise.all([
fetch('https://api.2weekmail.test/stats/system').then(res => res.json()),
fetch('https://api.2weekmail.test/stats/mailboxes').then(res => res.json()),
fetch('https://api.2weekmail.test/stats/domains').then(res => res.json())
fetch('https://api.2weekmail.fyi/stats/system').then(res => res.json()),
fetch('https://api.2weekmail.fyi/stats/mailboxes').then(res => res.json()),
fetch('https://api.2weekmail.fyi/stats/domains').then(res => res.json())
])
.then(([systemStats, mailboxStats, domainStats]) => {
// Update overview cards

View File

@ -2,13 +2,25 @@ const express = require('express');
const exphbs = require('express-handlebars');
const path = require('path');
const helmet = require('helmet');
const app = express();
const cors = require('cors');
const app = express();
require('dotenv').config();
const PORT = process.env.WEB_PORT || 3350;
const IP = process.env.IP || '0.0.0.0';
const webAuthRoutes = require('./routes/webAuth');
app.use(cors());
const corsOptions = {
origin: [
'https://2weekmail.fyi',
'http://localhost:3350'
],
methods: ['GET', 'POST', 'PUT', 'DELETE'],
credentials: true
};
app.use(cors(corsOptions));
app.use(express.json());
app.use(express.urlencoded({ extended: true }));
// Configure Helmet with custom CSP
// app.use(helmet({
@ -32,7 +44,14 @@ app.engine('hbs', exphbs.engine({
extname: '.hbs',
defaultLayout: 'main',
layoutsDir: path.join(__dirname, 'views/layouts'),
partialsDir: path.join(__dirname, 'views/partials')
partialsDir: path.join(__dirname, 'views/partials'),
helpers: {
section: function (name, options) {
if (!this._sections) this._sections = {};
this._sections[name] = options.fn(this);
return null;
},
}
}));
app.set('view engine', 'hbs');
app.set('views', path.join(__dirname, 'views'));
@ -46,6 +65,8 @@ app.get('/', (req, res) => {
res.render('home');
});
app.use('/auth', webAuthRoutes);
app.get('/stats', (req, res) => {
res.render('stats');
});
@ -54,15 +75,6 @@ app.get('/about', (req, res) => {
res.render('about');
});
// Auth routes (if needed)
app.get('/auth/register', (req, res) => {
res.render('auth/register');
});
app.get('/auth/activate', (req, res) => {
res.render('auth/activate');
});
app.get('/privacy', (req, res) => {
res.render('policies/privacy');
});

217
backup.sh Executable file
View File

@ -0,0 +1,217 @@
#!/bin/bash
# Load environment variables from .env file
if [ -f .env ]; then
# Read each line from .env and export variables
while IFS= read -r line || [[ -n "$line" ]]; do
# Skip comments and empty lines
[[ $line =~ ^#.*$ ]] && continue
[[ -z "$line" ]] && continue
# Export the variable
export "$line"
done < .env
else
echo "Error: .env file not found"
exit 1
fi
# Variables (now using environment variables with defaults)
MYSQL_USER="admin"
MYSQL_PASSWORD=${MYSQL_ADMIN_PASSWORD}
BACKUP_DIR="$(pwd)/backups"
REMOTE_HOST="${BACKUP_SERVER_HOST}" # Remote server hostname/IP
REMOTE_USER="${BACKUP_SERVER_USER}" # Remote server username
REMOTE_BACKUP_DIR="${BACKUP_SERVER_PATH}" # Remote server backup path
TEMP_DIR="$BACKUP_DIR/tmp"
TIMESTAMP=$(date +"%Y-%m-%d_%H-%M-%S")
MODE=""
HOURLY_RETENTION=4
DAILY_RETENTION=3
WEEKLY_RETENTION=1
MIN_SPACE=20 # Minimum space in GB
DB_CONTAINER="mailserver_db"
# Function to display usage
function usage() {
echo "Usage: $0 --hourly | --daily | --weekly"
exit 1
}
# Function to check available space and cleanup if necessary
function check_and_cleanup_space() {
local available_space=$(df -BG "$REMOTE_BACKUP_DIR" | awk 'NR==2 {gsub("G","",$4); print $4}')
if [ "$available_space" -le "$MIN_SPACE" ]; then
echo "Available space ($available_space GB) is less than minimum required ($MIN_SPACE GB)"
echo "Starting cleanup..."
while [ "$available_space" -le "$MIN_SPACE" ]; do
# Find oldest backup file
oldest_file=$(find "$REMOTE_BACKUP_DIR" -type f -name "*.zip" -printf '%T+ %p\n' | sort | head -n 1 | awk '{print $2}')
if [ -z "$oldest_file" ]; then
echo "No more files to delete!"
break
fi
# Get file size before deletion for logging
file_size=$(du -h "$oldest_file" | cut -f1)
# Delete the file
rm -f "$oldest_file"
echo "Deleted old backup: $oldest_file (Size: $file_size)"
# Recalculate available space
available_space=$(df -BG "$REMOTE_BACKUP_DIR" | awk 'NR==2 {gsub("G","",$4); print $4}')
done
echo "Cleanup complete. Available space: $available_space GB"
fi
}
echo "##########################"
echo "Starting backup..."
echo "##########################"
echo ""
# Check if the correct parameter is passed
if [[ "$1" == "--hourly" ]]; then
MODE="hourly"
elif [[ "$1" == "--daily" ]]; then
MODE="daily"
elif [[ "$1" == "--weekly" ]]; then
MODE="weekly"
else
usage
fi
echo "##########################"
echo "Mode: $MODE"
echo "##########################"
echo ""
echo "##########################"
echo "Creating backup directories..."
echo "##########################"
echo ""
# Create backup and temp directories if they don't exist
mkdir -p "$BACKUP_DIR"
mkdir -p "$TEMP_DIR"
mkdir -p "/var/log/backup"
# Backup all MySQL databases
echo "##########################"
echo "Backing up MySQL databases..."
echo "##########################"
echo ""
# Test database connection first
if ! docker exec -i $DB_CONTAINER mariadb -u $MYSQL_USER -p${MYSQL_PASSWORD} -e "SELECT 1;" >/dev/null 2>&1; then
echo "Error: Cannot connect to MySQL database. Please check credentials."
echo "Container: $DB_CONTAINER"
echo "User: $MYSQL_USER"
echo "Password being used: ${MYSQL_PASSWORD}"
exit 1
fi
# Get databases list without using -it flag (which requires terminal)
databases=$(docker exec -i $DB_CONTAINER mariadb -u $MYSQL_USER -p${MYSQL_PASSWORD} -e "SHOW DATABASES;" | grep -Ev "(Database|information_schema|performance_schema|mysql|sys)")
for db in $databases; do
echo "Backing up database: $db"
docker exec -i $DB_CONTAINER mariadb-dump -u $MYSQL_USER -p${MYSQL_PASSWORD} --databases "$db" > "$TEMP_DIR/${db}.sql"
if [ $? -eq 0 ]; then
zip -j "$TEMP_DIR/${db}.sql.zip" "$TEMP_DIR/${db}.sql"
rm "$TEMP_DIR/${db}.sql"
else
echo "Error backing up database: $db"
fi
done
DIRECTORIES_TO_BACKUP=(
"/opt/2weekmail"
)
for dir in "${DIRECTORIES_TO_BACKUP[@]}"; do
DIR_NAME=$(basename "$dir")
zip -r "$TEMP_DIR/${DIR_NAME}_${MODE}_$TIMESTAMP.zip" "$dir" -x "*/node_modules/*" "*/backups/*"
done
# Compress all SQL and directories into a single zip file
echo "##########################"
echo "Compressing backup files..."
echo "##########################"
echo ""
FINAL_BACKUP_FILE="$BACKUP_DIR/${MODE}_backup_$TIMESTAMP.zip"
find "$TEMP_DIR" -name "*.zip" | while read file; do
zip -ur "$FINAL_BACKUP_FILE" "$file"
done
# Clean up temporary files
rm -rf "$TEMP_DIR"
echo "##########################"
echo "Backup complete: $FINAL_BACKUP_FILE"
echo "##########################"
echo ""
function apply_retention() {
backup_type=$1
retention_count=$2
# Find all backup files of the specified type, sort them by modification time, and keep the newest
backups=($(ls -t $BACKUP_DIR/${backup_type}_backup_*.zip))
# If the number of backups exceeds the retention limit, delete the older ones
if [ ${#backups[@]} -gt $retention_count ]; then
delete_count=$((${#backups[@]} - $retention_count))
for (( i=$retention_count; i<${#backups[@]}; i++ )); do
rm -f "${backups[$i]}"
echo "Deleted old $backup_type backup: ${backups[$i]}"
done
fi
}
echo "##########################"
echo "Applying retention policy..."
echo "##########################"
echo ""
# Apply retention policy
if [[ "$MODE" == "hourly" ]]; then
apply_retention "hourly" $HOURLY_RETENTION
elif [[ "$MODE" == "daily" ]]; then
apply_retention "daily" $DAILY_RETENTION
elif [[ "$MODE" == "weekly" ]]; then
apply_retention "weekly" $WEEKLY_RETENTION
fi
echo "Retention policy applied: $MODE backups cleaned."
echo "##########################"
echo "Backup cleanup complete."
echo "##########################"
echo ""
echo "##########################"
echo "Checking remote backup space..."
echo "##########################"
echo ""
# Check and cleanup space before copying new backup
check_and_cleanup_space
# Use rsync to copy the backup
if ! ssh -p "$BACKUP_SERVER_PORT" "${REMOTE_USER}@${REMOTE_HOST}" exit 2>/dev/null; then
echo "Error: Cannot connect to remote backup server"
echo "Backup file is saved locally at: $FINAL_BACKUP_FILE"
exit 1
fi
rsync -av --progress -e "ssh -p $BACKUP_SERVER_PORT" "$FINAL_BACKUP_FILE" "${REMOTE_USER}@${REMOTE_HOST}:${REMOTE_BACKUP_DIR}/"
echo "##########################"
echo "Remote backup complete"
echo "##########################"
echo ""

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.