// Production-ready integrations for popular frameworks and platforms
const crypto = require('crypto');
const express = require('express');
// SHA-224 Request Signature Middleware
class SHA224Middleware {
  /**
   * @param {string|Buffer} secretKey - Shared HMAC key used to sign/verify requests.
   */
  constructor(secretKey) {
    this.secretKey = secretKey;
  }

  /**
   * Express middleware validating an HMAC-SHA224 request signature from the
   * `x-signature` header, bound to the client-supplied `x-timestamp`.
   * Requests outside a 5-minute window are rejected to limit replay.
   */
  validateSignature() {
    return (req, res, next) => {
      const signature = req.headers['x-signature'];
      const timestamp = req.headers['x-timestamp'];
      if (!signature || !timestamp) {
        return res.status(401).json({ error: 'Missing signature headers' });
      }
      // BUG FIX: the original used `parseInt(timestamp)` and `Math.abs(NaN) > x`
      // is false, so a signed NON-numeric timestamp passed the freshness check
      // forever — an indefinite replay window. Reject NaN explicitly.
      const ts = Number.parseInt(timestamp, 10);
      if (Number.isNaN(ts) || Math.abs(Date.now() - ts) > 5 * 60 * 1000) {
        return res.status(401).json({ error: 'Request expired' });
      }
      // The signature covers method, path, query, body and the raw timestamp.
      const payload = JSON.stringify({
        method: req.method,
        path: req.path,
        query: req.query,
        body: req.body,
        timestamp
      });
      const expectedSignature = crypto
        .createHmac('sha224', this.secretKey)
        .update(payload)
        .digest('hex');
      // BUG FIX: crypto.timingSafeEqual THROWS when buffer lengths differ, so a
      // short attacker-supplied signature produced a 500 instead of a 401.
      const provided = Buffer.from(signature);
      const expected = Buffer.from(expectedSignature);
      if (
        provided.length !== expected.length ||
        !crypto.timingSafeEqual(provided, expected)
      ) {
        return res.status(401).json({ error: 'Invalid signature' });
      }
      next();
    };
  }

  /**
   * Replaces each field listed in `req.body.sensitive` with a
   * `<field>_hash` SHA-224 digest and deletes the plaintext value.
   * NOTE(review): the client chooses which fields are "sensitive" — confirm
   * callers enforce a server-side allowlist.
   */
  hashSensitiveData() {
    return (req, res, next) => {
      // Guard: a non-array `sensitive` (e.g. a string) would iterate characters.
      if (req.body && Array.isArray(req.body.sensitive)) {
        for (const field of req.body.sensitive) {
          if (req.body[field]) {
            req.body[field + '_hash'] = crypto
              .createHash('sha224')
              .update(req.body[field])
              .digest('hex');
            delete req.body[field];
          }
        }
      }
      next();
    };
  }

  /**
   * Binds a session to a SHA-224 fingerprint of user-agent, IP and
   * accept-language; destroys sessions whose fingerprint changes.
   */
  sessionFingerprint() {
    return (req, res, next) => {
      const fingerprint = crypto
        .createHash('sha224')
        .update(req.headers['user-agent'] || '')
        // Guard: update(undefined) throws; req.ip can be undefined outside Express.
        .update(req.ip || '')
        .update(req.headers['accept-language'] || '')
        .digest('hex');
      req.sessionFingerprint = fingerprint;
      if (req.session && req.session.fingerprint) {
        if (req.session.fingerprint !== fingerprint) {
          req.session.destroy();
          return res.status(401).json({ error: 'Session invalid' });
        }
      } else if (req.session) {
        req.session.fingerprint = fingerprint;
      }
      next();
    };
  }

  /**
   * Computes a SHA-224 digest for each uploaded file (requires multer
   * memoryStorage so `file.buffer` exists) and rejects the request when a
   * client-provided `<fieldname>_checksum` does not match.
   */
  fileIntegrityCheck() {
    return async (req, res, next) => {
      if (!req.files || req.files.length === 0) {
        return next();
      }
      for (const file of req.files) {
        const hash = crypto.createHash('sha224');
        hash.update(file.buffer);
        file.sha224 = hash.digest('hex');
        if (req.body[`${file.fieldname}_checksum`]) {
          if (file.sha224 !== req.body[`${file.fieldname}_checksum`]) {
            return res.status(400).json({
              error: `Checksum mismatch for ${file.fieldname}`
            });
          }
        }
      }
      next();
    };
  }
}
// Usage
const app = express();
// The signing secret comes from the environment; every instance sharing it
// can validate the same request signatures.
const sha224 = new SHA224Middleware(process.env.SECRET_KEY);
// Apply middleware
app.use(express.json());
app.use(sha224.sessionFingerprint());
// Protected routes
app.use('/api/secure', sha224.validateSignature());
// File upload with integrity check
const multer = require('multer');
// memoryStorage keeps uploads in RAM so file.buffer is available for hashing.
const upload = multer({ storage: multer.memoryStorage() });
app.post('/upload',
  upload.array('files'),
  sha224.fileIntegrityCheck(),
  (req, res) => {
    // fileIntegrityCheck() has already attached f.sha224 to each file.
    res.json({
      success: true,
      files: req.files.map(f => ({
        name: f.originalname,
        size: f.size,
        sha224: f.sha224
      }))
    });
  }
);
module.exports = SHA224Middleware;
// routes/auth.js
const express = require('express');
const crypto = require('crypto');
const router = express.Router();
// User registration with SHA-224 verification
// User registration with SHA-224 verification
router.post('/register', async (req, res) => {
  const { username, email, password } = req.body;
  // BUG FIX: the token was previously sha224(email + Date.now() + Math.random()),
  // built entirely from guessable inputs (Math.random() is not a CSPRNG).
  // 28 random bytes -> 56 hex chars, so the /verify/:token format check
  // (^[a-f0-9]{56}$) still matches.
  const verificationToken = crypto.randomBytes(28).toString('hex');
  // NOTE(review): `password` is accepted but never stored in this snippet —
  // confirm the real handler hashes it with bcrypt/argon2 before persisting.
  const user = {
    username,
    email,
    emailHash: crypto.createHash('sha224').update(email).digest('hex'),
    verificationToken,
    verified: false,
    createdAt: new Date()
  };
  await db.users.create(user);
  // Send verification email
  await sendVerificationEmail(email, verificationToken);
  res.json({ success: true, message: 'Please check your email' });
});
// Email verification
router.get('/verify/:token', async (req, res) => {
  const { token } = req.params;
  // A SHA-224 digest is exactly 56 lowercase hex characters.
  const wellFormed = /^[a-f0-9]{56}$/.test(token);
  if (!wellFormed) {
    return res.status(400).json({ error: 'Invalid token format' });
  }
  const user = await db.users.findOne({ verificationToken: token });
  if (!user) {
    return res.status(404).json({ error: 'Invalid verification token' });
  }
  // Mark the account verified and burn the one-time token.
  user.verified = true;
  user.verificationToken = null;
  await user.save();
  res.json({ success: true, message: 'Email verified successfully' });
});
// API key generation
router.post('/api-key', authenticate, async (req, res) => {
  const userId = req.user.id;
  // Derive a fresh key from the user id, the current time and 32 random bytes.
  const keyMaterial = userId + Date.now() + crypto.randomBytes(32);
  const apiKey = crypto.createHash('sha224').update(keyMaterial).digest('hex');
  // Persist only the SHA-224 of the key — never the plaintext key itself.
  const hashedKey = crypto.createHash('sha224').update(apiKey).digest('hex');
  await db.apiKeys.create({
    userId,
    keyHash: hashedKey,
    lastUsed: null,
    createdAt: new Date()
  });
  // The plaintext key is shown exactly once, at creation time.
  res.json({
    apiKey,
    message: 'Store this key securely. It won\'t be shown again.'
  });
});
// Content integrity verification
// Content integrity verification
router.post('/content/verify', async (req, res) => {
  const { contentId, expectedHash } = req.body;
  // BUG FIX: crypto.timingSafeEqual THROWS when buffer lengths differ, so a
  // client-supplied hash of the wrong length crashed into a 500. Validate the
  // format up front and answer 400 instead.
  if (typeof expectedHash !== 'string' || !/^[a-f0-9]{56}$/.test(expectedHash)) {
    return res.status(400).json({ error: 'Invalid expectedHash format' });
  }
  const content = await db.content.findById(contentId);
  if (!content) {
    return res.status(404).json({ error: 'Content not found' });
  }
  const actualHash = crypto
    .createHash('sha224')
    .update(content.data)
    .digest('hex');
  // Constant-time comparison (both buffers are now guaranteed 56 bytes).
  const isValid = crypto.timingSafeEqual(
    Buffer.from(expectedHash),
    Buffer.from(actualHash)
  );
  res.json({
    valid: isValid,
    contentId,
    timestamp: new Date()
  });
});
module.exports = router;
// app.js - Complete Express application with SHA-224
const express = require('express');
const crypto = require('crypto');
const helmet = require('helmet');
const rateLimit = require('express-rate-limit');
const mongoose = require('mongoose');
const app = express();
// Security middleware: helmet sets common security headers; JSON bodies are
// parsed so downstream handlers can read req.body.
app.use(helmet());
app.use(express.json());
// Rate limiting with SHA-224 based client identification
// Rate limiting with SHA-224 based client identification
const createRateLimiter = (windowMs, max) => {
  // Key clients by a truncated SHA-224 of IP + user-agent rather than the raw
  // IP, so the limiter's key store never holds addresses in the clear.
  const keyGenerator = (req) => {
    const digest = crypto
      .createHash('sha224')
      .update(req.ip)
      .update(req.headers['user-agent'] || '')
      .digest('hex');
    return digest.substring(0, 16);
  };
  const handler = (req, res) => {
    res.status(429).json({
      error: 'Too many requests',
      retryAfter: req.rateLimit.resetTime
    });
  };
  return rateLimit({ windowMs, max, keyGenerator, handler });
};
// Apply rate limiting
app.use('/api/', createRateLimiter(15 * 60 * 1000, 100)); // 100 requests per 15 minutes
// Database schema with SHA-224 checksums
const FileSchema = new mongoose.Schema({
  filename: String,
  path: String,
  size: Number,
  // A SHA-224 digest is exactly 56 lowercase hex characters; the hash is
  // populated by the pre-save hook below.
  sha224: {
    type: String,
    required: true,
    match: /^[a-f0-9]{56}$/
  },
  uploadedBy: mongoose.Schema.Types.ObjectId,
  uploadedAt: { type: Date, default: Date.now }
});
// Pre-save hook to compute SHA-224 for newly created file records.
FileSchema.pre('save', async function(next) {
  if (this.isNew) {
    try {
      // BUG FIX: `readFile` was referenced without ever being imported, so
      // every first save rejected with a ReferenceError.
      const { readFile } = require('fs/promises');
      const fileContent = await readFile(this.path);
      this.sha224 = crypto
        .createHash('sha224')
        .update(fileContent)
        .digest('hex');
    } catch (err) {
      // Surface I/O failures to mongoose instead of rejecting out-of-band.
      return next(err);
    }
  }
  next();
});
const File = mongoose.model('File', FileSchema);
// Audit logging with SHA-224 integrity
class AuditLogger {
  constructor() {
    this.logs = [];
  }

  /**
   * Append an event to the tamper-evident log. Each entry carries a SHA-224
   * hash of its own contents plus a chain hash linking it to the previous
   * entry, so any later modification breaks the chain.
   * @param {object} event - Arbitrary event fields; `timestamp` is added here.
   */
  log(event) {
    const entry = {
      timestamp: Date.now(),
      ...event
    };
    // Integrity hash over the entry as it stood before hashes were attached.
    entry.hash = crypto
      .createHash('sha224')
      .update(JSON.stringify(entry))
      .digest('hex');
    // Chain with previous log hash (first entry has no chain fields).
    if (this.logs.length > 0) {
      const prevHash = this.logs[this.logs.length - 1].hash;
      entry.prevHash = prevHash;
      entry.chainHash = crypto
        .createHash('sha224')
        .update(prevHash + entry.hash)
        .digest('hex');
    }
    this.logs.push(entry);
    // BUG FIX: persistence is deliberately fire-and-forget, but the floating
    // promise previously had no rejection handler — a DB failure became an
    // unhandled rejection (fatal on modern Node).
    this.persistLog(entry).catch((err) => {
      console.error('Audit log persistence failed:', err);
    });
  }

  async persistLog(entry) {
    await db.auditLogs.create(entry);
  }

  /**
   * Recompute every chain hash in order.
   * @returns {{valid: boolean, brokenAt?: number, entry?: object}}
   */
  async verifyIntegrity() {
    for (let i = 1; i < this.logs.length; i++) {
      const current = this.logs[i];
      const prev = this.logs[i - 1];
      const expectedChainHash = crypto
        .createHash('sha224')
        .update(prev.hash + current.hash)
        .digest('hex');
      if (current.chainHash !== expectedChainHash) {
        return {
          valid: false,
          brokenAt: i,
          entry: current
        };
      }
    }
    return { valid: true };
  }
}
const auditLogger = new AuditLogger();
// Middleware to log all API calls
app.use('/api', (req, res, next) => {
  const startedAt = Date.now();
  // Defer logging until the response has been fully written, so the status
  // code and duration are final.
  res.on('finish', () => {
    const entry = {
      method: req.method,
      path: req.path,
      ip: req.ip,
      statusCode: res.statusCode,
      duration: Date.now() - startedAt,
      userAgent: req.headers['user-agent']
    };
    auditLogger.log(entry);
  });
  next();
});
// Health check with integrity verification
app.get('/health', async (req, res) => {
  const checks = {
    server: 'ok',
    database: 'checking',
    integrity: 'checking'
  };
  // Database reachability via an admin ping.
  try {
    await mongoose.connection.db.admin().ping();
    checks.database = 'ok';
  } catch (error) {
    checks.database = 'error';
  }
  // Audit-chain integrity.
  const integrity = await auditLogger.verifyIntegrity();
  checks.integrity = integrity.valid ? 'ok' : 'compromised';
  // Sign the health snapshot so consumers can detect tampering in transit.
  const signature = crypto
    .createHash('sha224')
    .update(JSON.stringify(checks))
    .update(Date.now().toString())
    .digest('hex');
  res.json({ ...checks, timestamp: Date.now(), signature });
});
// Error handling
app.use((err, req, res, next) => {
  // A short SHA-224-derived reference ties the client-visible error to the
  // full stack trace in the server logs without leaking internals.
  const digest = crypto
    .createHash('sha224')
    .update(err.stack + Date.now())
    .digest('hex');
  const errorRef = digest.substring(0, 16);
  console.error(`Error [${errorRef}]:`, err);
  res.status(500).json({ error: 'Internal server error', reference: errorRef });
});
// Start server; defaults to port 3000 when PORT is unset.
const PORT = process.env.PORT || 3000;
app.listen(PORT, () => {
  console.log(`SHA-224 secured server running on port ${PORT}`);
});
# middleware/sha224_middleware.py
import hashlib
import hmac
import json
import time
from django.http import JsonResponse
from django.utils.deprecation import MiddlewareMixin
from django.conf import settings
class SHA224SecurityMiddleware(MiddlewareMixin):
    """SHA-224 based security middleware for Django.

    Provides session fingerprinting, HMAC-SHA224 request-signature checks
    for /api/secure/ endpoints, and a response-body integrity header.
    """

    def process_request(self, request):
        # Fingerprint = SHA-224 over user-agent + client address + accept-language.
        fingerprint_data = f"{request.META.get('HTTP_USER_AGENT', '')}"
        fingerprint_data += f"{request.META.get('REMOTE_ADDR', '')}"
        fingerprint_data += f"{request.META.get('HTTP_ACCEPT_LANGUAGE', '')}"
        fingerprint = hashlib.sha224(fingerprint_data.encode()).hexdigest()
        request.session_fingerprint = fingerprint
        # A changed fingerprint on an existing session suggests hijacking:
        # flush the session and refuse the request.
        if request.session.get('fingerprint'):
            if request.session['fingerprint'] != fingerprint:
                request.session.flush()
                return JsonResponse({'error': 'Session invalid'}, status=401)
        else:
            request.session['fingerprint'] = fingerprint
        # API signature validation for protected endpoints.
        if request.path.startswith('/api/secure/'):
            return self.validate_signature(request)

    def validate_signature(self, request):
        """Return a 401 JsonResponse on failure, or None when the
        HMAC-SHA224 signature is valid and fresh."""
        signature = request.META.get('HTTP_X_SIGNATURE')
        timestamp = request.META.get('HTTP_X_TIMESTAMP')
        if not signature or not timestamp:
            return JsonResponse({'error': 'Missing signature headers'}, status=401)
        # BUG FIX: int() on a malformed header raised ValueError and surfaced
        # as an HTTP 500; treat it as an authentication failure instead.
        try:
            request_time = int(timestamp)
        except (TypeError, ValueError):
            return JsonResponse({'error': 'Invalid timestamp'}, status=401)
        # 5 minute replay window, timestamps are in milliseconds.
        current_time = int(time.time() * 1000)
        if abs(current_time - request_time) > 5 * 60 * 1000:
            return JsonResponse({'error': 'Request expired'}, status=401)
        # The signature covers method, path, timestamp and (when present) body,
        # serialized with sorted keys so both sides agree on byte order.
        payload = {
            'method': request.method,
            'path': request.path,
            'timestamp': timestamp
        }
        if request.body:
            payload['body'] = request.body.decode('utf-8')
        expected_signature = hmac.new(
            settings.SECRET_KEY.encode(),
            json.dumps(payload, sort_keys=True).encode(),
            hashlib.sha224
        ).hexdigest()
        # Constant-time comparison prevents timing side channels.
        if not hmac.compare_digest(signature, expected_signature):
            return JsonResponse({'error': 'Invalid signature'}, status=401)

    def process_response(self, request, response):
        # Expose a SHA-224 of the response body so clients can verify
        # end-to-end integrity.
        if hasattr(response, 'content'):
            content_hash = hashlib.sha224(response.content).hexdigest()
            response['X-Content-SHA224'] = content_hash
        return response
# models.py
from django.db import models
import hashlib
class SecureFile(models.Model):
    """File model with SHA-224 integrity checking."""

    filename = models.CharField(max_length=255)
    file_path = models.FileField(upload_to='secure_files/')
    # 56 hex chars = one SHA-224 digest; populated automatically in save().
    sha224_hash = models.CharField(max_length=56, editable=False)
    uploaded_by = models.ForeignKey('auth.User', on_delete=models.CASCADE)
    uploaded_at = models.DateTimeField(auto_now_add=True)

    def save(self, *args, **kwargs):
        # Recompute the digest from the file contents on every save, reading
        # in chunks so large files are never loaded into memory at once.
        if self.file_path:
            sha224 = hashlib.sha224()
            for chunk in self.file_path.chunks():
                sha224.update(chunk)
            self.sha224_hash = sha224.hexdigest()
        super().save(*args, **kwargs)

    def verify_integrity(self):
        """Re-hash the stored file and compare with the recorded digest.

        Returns True when the file on disk still matches ``sha224_hash``.
        """
        sha224 = hashlib.sha224()
        for chunk in self.file_path.chunks():
            sha224.update(chunk)
        current_hash = sha224.hexdigest()
        return current_hash == self.sha224_hash
# views.py
from django.views import View
from django.http import JsonResponse
from django.contrib.auth.mixins import LoginRequiredMixin
import hashlib
import secrets
class APIKeyView(LoginRequiredMixin, View):
    """Generate and manage API keys with SHA-224."""

    def post(self, request):
        # Key material: user id plus 32 random bytes from the `secrets` CSPRNG.
        raw_key = f"{request.user.id}{secrets.token_hex(32)}"
        api_key = hashlib.sha224(raw_key.encode()).hexdigest()
        # Only the hash of the key is persisted; the plaintext is returned once.
        key_hash = hashlib.sha224(api_key.encode()).hexdigest()
        # NOTE(review): APIKey is not imported in this snippet — confirm the
        # real module imports it from its models module.
        APIKey.objects.create(
            user=request.user,
            key_hash=key_hash,
            name=request.POST.get('name', 'Default')
        )
        return JsonResponse({
            'api_key': api_key,
            'message': 'Store this key securely. It cannot be retrieved again.'
        })
# management/commands/verify_integrity.py
from django.core.management.base import BaseCommand
from myapp.models import SecureFile
class Command(BaseCommand):
    help = 'Verify integrity of all files using SHA-224'

    def handle(self, *args, **options):
        # Re-hash every stored file and report any whose contents no longer
        # match the recorded SHA-224 digest.
        files = SecureFile.objects.all()
        corrupted = []
        for file in files:
            if not file.verify_integrity():
                corrupted.append(file.id)
                self.stdout.write(
                    self.style.ERROR(f'File {file.filename} is corrupted!')
                )
        if not corrupted:
            self.stdout.write(
                self.style.SUCCESS('All files passed integrity check')
            )
        else:
            self.stdout.write(
                self.style.ERROR(f'{len(corrupted)} files are corrupted')
            )
-- Enable pgcrypto extension (provides the digest() function used for SHA-224)
CREATE EXTENSION IF NOT EXISTS pgcrypto;

-- Documents with SHA-224 integrity columns; digests are 56 lowercase hex
-- chars, filled in automatically by document_hash_trigger below.
CREATE TABLE secure_documents (
    id SERIAL PRIMARY KEY,
    filename VARCHAR(255) NOT NULL,
    content TEXT NOT NULL,
    content_hash VARCHAR(56) NOT NULL,
    metadata JSONB,
    metadata_hash VARCHAR(56),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
    integrity_verified BOOLEAN DEFAULT TRUE
);

-- Compute a lowercase hex SHA-224 digest of the input text.
-- Declared IMMUTABLE so it may be used in indexes and generated columns.
CREATE OR REPLACE FUNCTION compute_sha224(input_text TEXT)
RETURNS VARCHAR(56) AS $$
BEGIN
    RETURN encode(digest(input_text, 'sha224'), 'hex');
END;
$$ LANGUAGE plpgsql IMMUTABLE;
-- Trigger to automatically compute hashes on insert/update. On UPDATE it
-- also checks that the row as last stored (OLD) is internally consistent —
-- a mismatch means content or hash was modified without firing this trigger.
CREATE OR REPLACE FUNCTION update_document_hashes()
RETURNS TRIGGER AS $$
BEGIN
    -- Recompute the content hash for the incoming row.
    NEW.content_hash := compute_sha224(NEW.content);
    -- Metadata is nullable; hash it only when present.
    IF NEW.metadata IS NOT NULL THEN
        NEW.metadata_hash := compute_sha224(NEW.metadata::TEXT);
    END IF;
    NEW.updated_at := CURRENT_TIMESTAMP;
    -- Out-of-band tamper check against the previously stored row.
    IF TG_OP = 'UPDATE' THEN
        IF OLD.content_hash != compute_sha224(OLD.content) THEN
            NEW.integrity_verified := FALSE;
            RAISE WARNING 'Integrity check failed for document %', NEW.id;
        END IF;
    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER document_hash_trigger
BEFORE INSERT OR UPDATE ON secure_documents
FOR EACH ROW
EXECUTE FUNCTION update_document_hashes();
-- Append-only audit log: event_hash/prev_hash/chain_hash (SHA-224, 56 hex
-- chars each) form a hash chain so tampering with any row is detectable.
CREATE TABLE audit_log (
    id SERIAL PRIMARY KEY,
    event_type VARCHAR(50) NOT NULL,
    table_name VARCHAR(100),
    record_id INTEGER,
    user_id INTEGER,
    event_data JSONB,
    event_hash VARCHAR(56) NOT NULL,
    prev_hash VARCHAR(56),
    chain_hash VARCHAR(56),
    created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
);
-- Append a tamper-evident audit record: each row hashes its own payload and
-- chains to the previous row's event_hash.
CREATE OR REPLACE FUNCTION create_audit_log(
    p_event_type VARCHAR,
    p_table_name VARCHAR,
    p_record_id INTEGER,
    p_user_id INTEGER,
    p_event_data JSONB
) RETURNS VOID AS $$
DECLARE
    v_event_hash VARCHAR(56);
    v_prev_hash VARCHAR(56);
    v_chain_hash VARCHAR(56);
BEGIN
    -- BUG FIX: '||' with any NULL operand yields NULL, so a NULL argument
    -- (table_name, record_id, user_id and event_data are all nullable)
    -- previously made v_event_hash NULL and violated the NOT NULL constraint.
    -- COALESCE each nullable input; '|' separators remove concatenation
    -- ambiguity ('ab'+'c' vs 'a'+'bc').
    v_event_hash := compute_sha224(
        COALESCE(p_event_type, '') || '|' ||
        COALESCE(p_table_name, '') || '|' ||
        COALESCE(p_record_id::TEXT, '') || '|' ||
        COALESCE(p_user_id::TEXT, '') || '|' ||
        COALESCE(p_event_data::TEXT, '') || '|' ||
        CURRENT_TIMESTAMP::TEXT
    );
    -- Chain against the most recent entry (NULL for the first row).
    SELECT event_hash INTO v_prev_hash
    FROM audit_log
    ORDER BY id DESC
    LIMIT 1;
    IF v_prev_hash IS NOT NULL THEN
        v_chain_hash := compute_sha224(v_prev_hash || v_event_hash);
    ELSE
        v_chain_hash := v_event_hash;
    END IF;
    INSERT INTO audit_log (
        event_type, table_name, record_id, user_id,
        event_data, event_hash, prev_hash, chain_hash
    ) VALUES (
        p_event_type, p_table_name, p_record_id, p_user_id,
        p_event_data, v_event_hash, v_prev_hash, v_chain_hash
    );
END;
$$ LANGUAGE plpgsql;
-- Walk the audit log in id order and recompute each chain hash from the
-- stored event hashes; returns one row indicating validity and, on failure,
-- the id where the chain first broke.
CREATE OR REPLACE FUNCTION verify_audit_integrity()
RETURNS TABLE (
    is_valid BOOLEAN,
    invalid_at INTEGER,
    message TEXT
) AS $$
DECLARE
    r RECORD;
    v_expected_chain VARCHAR(56);
    v_prev_hash VARCHAR(56) := NULL;
BEGIN
    FOR r IN SELECT * FROM audit_log ORDER BY id LOOP
        -- The first row has no predecessor, so only subsequent rows chain.
        IF v_prev_hash IS NOT NULL THEN
            v_expected_chain := compute_sha224(v_prev_hash || r.event_hash);
            IF r.chain_hash != v_expected_chain THEN
                RETURN QUERY SELECT
                    FALSE,
                    r.id,
                    'Chain hash mismatch at ID ' || r.id;
                RETURN;
            END IF;
        END IF;
        v_prev_hash := r.event_hash;
    END LOOP;
    RETURN QUERY SELECT TRUE, NULL::INTEGER, 'Audit log integrity verified';
END;
$$ LANGUAGE plpgsql;
-- Indexes for efficient hash lookups (duplicate detection, chain walks).
CREATE INDEX idx_content_hash ON secure_documents(content_hash);
CREATE INDEX idx_audit_event_hash ON audit_log(event_hash);
CREATE INDEX idx_audit_chain_hash ON audit_log(chain_hash);
-- Example usage: content_hash is populated by document_hash_trigger.
INSERT INTO secure_documents (filename, content)
VALUES ('contract.pdf', 'Binary content here...');
-- Verify a specific document by re-hashing its content in place.
SELECT filename,
    content_hash,
    compute_sha224(content) = content_hash AS is_valid
FROM secure_documents
WHERE id = 1;
const mongoose = require('mongoose');
const crypto = require('crypto');
const GridFSBucket = require('mongodb').GridFSBucket;
// Schema with SHA-224 integrity: contentHash/metadataHash are maintained by
// the pre-save middleware below; `versions` keeps a per-revision snapshot.
const DocumentSchema = new mongoose.Schema({
  title: String,
  content: String,
  // A SHA-224 digest is exactly 56 lowercase hex characters.
  contentHash: {
    type: String,
    required: true,
    match: /^[a-f0-9]{56}$/
  },
  author: {
    type: mongoose.Schema.Types.ObjectId,
    ref: 'User'
  },
  versions: [{
    versionNumber: Number,
    content: String,
    hash: String,
    createdAt: Date
  }],
  metadata: mongoose.Schema.Types.Mixed,
  metadataHash: String,
  createdAt: { type: Date, default: Date.now },
  updatedAt: { type: Date, default: Date.now }
});
// Pre-save middleware: maintain content/metadata hashes and version history.
DocumentSchema.pre('save', function(next) {
  // BUG FIX: crypto's update(undefined) throws a TypeError, so saving a
  // document without content crashed instead of failing validation. Hash the
  // empty string and let the schema's `required` validator report it.
  const content = this.content != null ? this.content : '';
  this.contentHash = crypto
    .createHash('sha224')
    .update(content)
    .digest('hex');
  // Hash metadata only when present.
  if (this.metadata) {
    this.metadataHash = crypto
      .createHash('sha224')
      .update(JSON.stringify(this.metadata))
      .digest('hex');
  }
  // Snapshot each content revision together with its hash.
  if (this.isModified('content')) {
    this.versions.push({
      versionNumber: this.versions.length + 1,
      content: this.content,
      hash: this.contentHash,
      createdAt: new Date()
    });
  }
  this.updatedAt = new Date();
  next();
});
// Instance method to verify integrity: re-hash the current content and
// compare with the stored digest.
DocumentSchema.methods.verifyIntegrity = function() {
  const recomputed = crypto
    .createHash('sha224')
    .update(this.content)
    .digest('hex');
  return recomputed === this.contentHash;
};
// Static method for bulk integrity check across every document.
DocumentSchema.statics.verifyAllIntegrity = async function() {
  const documents = await this.find({});
  const results = {
    total: documents.length,
    valid: 0,
    corrupted: []
  };
  for (const doc of documents) {
    const intact = doc.verifyIntegrity();
    if (intact) {
      results.valid += 1;
    } else {
      results.corrupted.push(doc._id);
    }
  }
  return results;
};
const Document = mongoose.model('Document', DocumentSchema);
// GridFS integration for large files with SHA-224
class SecureGridFS {
  /**
   * @param {Db} db - Connected MongoDB database handle.
   */
  constructor(db) {
    this.bucket = new GridFSBucket(db, {
      bucketName: 'secureFiles'
    });
  }

  /**
   * Stream a file into GridFS while computing its SHA-224, then record the
   * digest in the stored file's metadata.
   * @returns {Promise<{fileId, filename, sha224}>}
   */
  async uploadFile(filename, stream, metadata = {}) {
    const uploadStream = this.bucket.openUploadStream(filename, { metadata });
    const sha224 = crypto.createHash('sha224');
    return new Promise((resolve, reject) => {
      stream
        // BUG FIX: pipe() does not forward source-stream errors, so a failure
        // while reading previously left this promise pending forever.
        .on('error', reject)
        .on('data', chunk => sha224.update(chunk))
        .pipe(uploadStream)
        .on('finish', async () => {
          try {
            const fileHash = sha224.digest('hex');
            // Attach the digest to the GridFS file document.
            await this.bucket.s.db
              .collection('secureFiles.files')
              .updateOne(
                { _id: uploadStream.id },
                {
                  $set: {
                    'metadata.sha224': fileHash,
                    'metadata.uploadedAt': new Date()
                  }
                }
              );
            resolve({
              fileId: uploadStream.id,
              filename,
              sha224: fileHash
            });
          } catch (err) {
            // BUG FIX: a throw inside this async handler previously became an
            // unhandled rejection instead of failing the upload.
            reject(err);
          }
        })
        .on('error', reject);
    });
  }

  /**
   * Download a file, re-hash it, and reject unless the digest matches the
   * one recorded at upload time. Buffers the whole file in memory.
   * @returns {Promise<{data: Buffer, verified: true, sha224: string}>}
   */
  async downloadFileWithVerification(fileId) {
    const file = await this.bucket.s.db
      .collection('secureFiles.files')
      .findOne({ _id: fileId });
    if (!file) {
      throw new Error('File not found');
    }
    const expectedHash = file.metadata.sha224;
    const downloadStream = this.bucket.openDownloadStream(fileId);
    const sha224 = crypto.createHash('sha224');
    return new Promise((resolve, reject) => {
      const chunks = [];
      downloadStream
        .on('data', chunk => {
          chunks.push(chunk);
          sha224.update(chunk);
        })
        .on('end', () => {
          const actualHash = sha224.digest('hex');
          if (actualHash !== expectedHash) {
            reject(new Error('File integrity check failed'));
          } else {
            resolve({
              data: Buffer.concat(chunks),
              verified: true,
              sha224: actualHash
            });
          }
        })
        .on('error', reject);
    });
  }
}
// Aggregation pipeline for hash-based analytics: group documents by content
// hash and surface hashes shared by more than one document (duplicates).
async function hashAnalytics() {
  const pipeline = [
    {
      $group: {
        _id: '$contentHash',
        count: { $sum: 1 },
        documents: { $push: '$_id' }
      }
    },
    {
      $match: {
        count: { $gt: 1 } // keep only duplicated content hashes
      }
    },
    {
      $project: {
        hash: '$_id',
        duplicateCount: '$count',
        documentIds: '$documents'
      }
    }
  ];
  return await Document.aggregate(pipeline);
}
module.exports = { Document, SecureGridFS, hashAnalytics };
# lambda_function.py - AWS Lambda for SHA-224 processing
import json
import hashlib
import boto3
import base64
from datetime import datetime
# Module-level clients are created once and reused across warm invocations.
s3 = boto3.client('s3')
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('FileHashes')
def lambda_handler(event, context):
    """Hash S3 uploads with SHA-224 and record the results.

    Triggered by S3 ObjectCreated events. For each record: downloads the
    object, computes its SHA-224, stores the digest in DynamoDB, tags the
    S3 object, and optionally fans out a message to SQS.

    Re-raises any per-object failure so Lambda retries / dead-letters the
    event; returns a 200 response dict on success.
    """
    # BUG FIX: `os` was referenced below (os.environ) but never imported,
    # causing a NameError whenever SQS_QUEUE_URL was configured.
    import os

    for record in event['Records']:
        bucket = record['s3']['bucket']['name']
        key = record['s3']['object']['key']
        try:
            response = s3.get_object(Bucket=bucket, Key=key)
            file_content = response['Body'].read()
            sha224_hash = hashlib.sha224(file_content).hexdigest()
            # Persist digest plus basic metadata, keyed by object key.
            table.put_item(
                Item={
                    'FileKey': key,
                    'Bucket': bucket,
                    'SHA224': sha224_hash,
                    'FileSize': len(file_content),
                    'ProcessedAt': datetime.now().isoformat(),
                    'ContentType': response.get('ContentType', 'unknown')
                }
            )
            # Tag the object so the digest travels with it in S3.
            s3.put_object_tagging(
                Bucket=bucket,
                Key=key,
                Tagging={
                    'TagSet': [
                        {'Key': 'SHA224', 'Value': sha224_hash},
                        {'Key': 'Verified', 'Value': 'true'}
                    ]
                }
            )
            # Optional fan-out for downstream processing.
            if 'SQS_QUEUE_URL' in os.environ:
                sqs = boto3.client('sqs')
                sqs.send_message(
                    QueueUrl=os.environ['SQS_QUEUE_URL'],
                    MessageBody=json.dumps({
                        'bucket': bucket,
                        'key': key,
                        'sha224': sha224_hash,
                        'timestamp': datetime.now().isoformat()
                    })
                )
            print(f"Processed {key}: SHA-224 = {sha224_hash}")
        except Exception as e:
            print(f"Error processing {key}: {str(e)}")
            raise
    return {
        'statusCode': 200,
        'body': json.dumps('Processing complete')
    }
# serverless.yml - Serverless Framework configuration
service: sha224-processor

provider:
  name: aws
  runtime: python3.9
  region: us-east-1
  environment:
    HASH_TABLE: ${self:custom.hashTable}

functions:
  processFile:
    handler: lambda_function.lambda_handler
    events:
      - s3:
          bucket: ${self:custom.bucketName}
          event: s3:ObjectCreated:*
    # NOTE(review): per-function iamRoleStatements require the
    # serverless-iam-roles-per-function plugin; confirm it is installed, or
    # move these under provider.iam.role.statements.
    iamRoleStatements:
      - Effect: Allow
        Action:
          - s3:GetObject
          - s3:PutObjectTagging
        Resource: "arn:aws:s3:::${self:custom.bucketName}/*"
      - Effect: Allow
        Action:
          - dynamodb:PutItem
          - dynamodb:GetItem
        Resource: "arn:aws:dynamodb:*:*:table/${self:custom.hashTable}"

resources:
  Resources:
    HashTable:
      Type: AWS::DynamoDB::Table
      Properties:
        TableName: ${self:custom.hashTable}
        # BUG FIX: BillingMode PAY_PER_REQUEST and ProvisionedThroughput are
        # mutually exclusive in CloudFormation; the original declared both and
        # failed stack validation. On-demand billing is kept.
        BillingMode: PAY_PER_REQUEST
        AttributeDefinitions:
          - AttributeName: FileKey
            AttributeType: S
          - AttributeName: SHA224
            AttributeType: S
        KeySchema:
          - AttributeName: FileKey
            KeyType: HASH
        GlobalSecondaryIndexes:
          - IndexName: SHA224Index
            KeySchema:
              - AttributeName: SHA224
                KeyType: HASH
            Projection:
              ProjectionType: ALL

custom:
  bucketName: my-secure-files
  hashTable: FileHashes
# Typical SHA-224 integration architecture with caching and persistent storage