Input CSV Format:
```csv
title,description,url
How to Bake Bread,Learn bread baking from scratch,https://example.com/bread
Python Tutorial,Complete Python programming guide,https://example.com/python
SEO Best Practices,Master SEO in 2026,https://example.com/seo
```
Output Includes:
- ✅ Original data preserved
- 🔗 Generated aéPiot backlinks
- 📊 Status tracking
- 📄 XML sitemap for Google Search Console
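As a sketch, one generated row might look like this (column names are illustrative and depend on the generator you use; the status column assumes the generator records one):
```csv
title,description,original_url,aepiot_url,status
How to Bake Bread,Learn bread baking from scratch,https://example.com/bread,https://aepiot.com/backlink.html?title=How%20to%20Bake%20Bread&description=Learn%20bread%20baking%20from%20scratch&link=https%3A%2F%2Fexample.com%2Fbread,generated
```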
🔧 Node.js Command-Line Tool
For JavaScript developers, here's a Node.js implementation.
```javascript
#!/usr/bin/env node
/**
 * aéPiot CLI Tool
 * Generate backlinks from CSV files using Node.js
 */
const fs = require('fs');

class AePiotCLI {
  constructor() {
    this.baseUrl = 'https://aepiot.com/backlink.html';
    this.results = [];
  }

  encodeURL(title, description, url) {
    const encodedTitle = encodeURIComponent(title);
    const encodedDesc = encodeURIComponent(description);
    const encodedUrl = encodeURIComponent(url);
    return `${this.baseUrl}?title=${encodedTitle}&description=${encodedDesc}&link=${encodedUrl}`;
  }

  async processCSV(inputFile, outputFile) {
    console.log(`📂 Reading ${inputFile}...`);
    // Read CSV (naive parsing: does not handle quoted fields that contain commas)
    const content = fs.readFileSync(inputFile, 'utf-8');
    const lines = content.split('\n');
    const headers = lines[0].split(',').map(h => h.trim());
    const titleIdx = headers.findIndex(h => h.toLowerCase() === 'title');
    const descIdx = headers.findIndex(h => h.toLowerCase() === 'description');
    const urlIdx = headers.findIndex(h => h.toLowerCase() === 'url');
    // Count only non-empty data rows
    const rowCount = lines.slice(1).filter(l => l.trim()).length;
    console.log(`🔄 Processing ${rowCount} rows...`);
    for (let i = 1; i < lines.length; i++) {
      if (!lines[i].trim()) continue;
      const values = lines[i].split(',').map(v => v.trim().replace(/^"|"$/g, ''));
      const title = values[titleIdx] || '';
      const description = values[descIdx] || '';
      const url = values[urlIdx] || '';
      if (!title || !url) continue;
      const aepiotUrl = this.encodeURL(title, description, url);
      this.results.push({
        title,
        description,
        original_url: url,
        aepiot_url: aepiotUrl
      });
    }
    // Export
    console.log(`💾 Saving to ${outputFile}...`);
    this.exportCSV(outputFile);
    console.log(`✅ Generated ${this.results.length} backlinks`);
  }

  exportCSV(outputFile) {
    // Guard against an empty run so Object.keys() never sees undefined
    if (this.results.length === 0) {
      console.warn('⚠️ No rows to export');
      return;
    }
    const headers = Object.keys(this.results[0]);
    const csvContent = [
      headers.join(','),
      ...this.results.map(r => headers.map(h => `"${r[h]}"`).join(','))
    ].join('\n');
    fs.writeFileSync(outputFile, csvContent, 'utf-8');
  }

  generateSitemap(outputFile = 'sitemap.xml') {
    // XML-escape the URL: '&' in query strings must be written as '&amp;'
    const escapeXml = s => s.replace(/&/g, '&amp;').replace(/</g, '&lt;').replace(/>/g, '&gt;');
    const xml = [
      '<?xml version="1.0" encoding="UTF-8"?>',
      '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
    ];
    this.results.forEach(result => {
      xml.push('  <url>');
      xml.push(`    <loc>${escapeXml(result.aepiot_url)}</loc>`);
      xml.push(`    <lastmod>${new Date().toISOString().split('T')[0]}</lastmod>`);
      xml.push('  </url>');
    });
    xml.push('</urlset>');
    fs.writeFileSync(outputFile, xml.join('\n'), 'utf-8');
    console.log(`📍 Sitemap created: ${outputFile}`);
  }
}

// CLI Interface
const args = process.argv.slice(2);
if (args.length < 2) {
  console.log('Usage: node aepiot-cli.js <input.csv> <output.csv> [sitemap.xml]');
  process.exit(1);
}
const cli = new AePiotCLI();
cli.processCSV(args[0], args[1]).then(() => {
  if (args[2]) {
    cli.generateSitemap(args[2]);
  }
});
```
Usage:
```bash
node aepiot-cli.js input.csv output.csv sitemap.xml
```
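Run against the three-row sample CSV from the top of this section, the console output should look roughly like this (derived from the script's log statements):
```
📂 Reading input.csv...
🔄 Processing 3 rows...
💾 Saving to output.csv...
✅ Generated 3 backlinks
📍 Sitemap created: sitemap.xml
```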
📊 Excel/Google Sheets Integration
For non-programmers, spreadsheet formulas can generate backlinks.
Excel Formula
```excel
=CONCATENATE(
"https://aepiot.com/backlink.html?title=",
ENCODEURL(A2),
"&description=",
ENCODEURL(B2),
"&link=",
ENCODEURL(C2)
)
```
Where:
- A2 = Title
- B2 = Description
- C2 = URL

Note: ENCODEURL is available in Excel 2013 and later.
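For row 2 of the sample CSV, the formula produces (spaces become %20 under ENCODEURL):

https://aepiot.com/backlink.html?title=How%20to%20Bake%20Bread&description=Learn%20bread%20baking%20from%20scratch&link=https%3A%2F%2Fexample.com%2Fbread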
Google Sheets Formula
```excel
=CONCATENATE(
"https://aepiot.com/backlink.html?title=",
ENCODEURL(A2),
"&description=",
ENCODEURL(B2),
"&link=",
ENCODEURL(C2)
)
```
Google Apps Script for Bulk Generation:
```javascript
function generateAePiotLinks() {
  const sheet = SpreadsheetApp.getActiveSheet();
  const lastRow = sheet.getLastRow();
  // Assuming: Column A = Title, B = Description, C = URL, D = Output
  for (let i = 2; i <= lastRow; i++) {
    const title = sheet.getRange(i, 1).getValue();
    const description = sheet.getRange(i, 2).getValue();
    const url = sheet.getRange(i, 3).getValue();
    if (title && url) {
      const encodedTitle = encodeURIComponent(title);
      const encodedDesc = encodeURIComponent(description || '');
      const encodedUrl = encodeURIComponent(url);
      const aepiotUrl = `https://aepiot.com/backlink.html?title=${encodedTitle}&description=${encodedDesc}&link=${encodedUrl}`;
      sheet.getRange(i, 4).setValue(aepiotUrl);
    }
  }
  SpreadsheetApp.getUi().alert(`Generated links for ${lastRow - 1} rows!`);
}

function onOpen() {
  SpreadsheetApp.getUi()
    .createMenu('aéPiot Tools')
    .addItem('Generate Links', 'generateAePiotLinks')
    .addToUi();
}
```
To Use:
- Open your Google Sheet
- Extensions → Apps Script (Tools → Script Editor in the old UI)
- Paste the code above
- Save, then refresh the sheet
- Use the "aéPiot Tools" menu
⏰ Scheduled Automation with Cron
Automate daily/weekly link generation on Linux servers.
Python Script with Scheduler
```python
#!/usr/bin/env python3
"""
Scheduled aéPiot Link Generator
Runs automatically via cron job
"""
import csv
import logging
from datetime import datetime
from pathlib import Path
from urllib.parse import quote

# Setup logging (the cron user needs write access to this path)
logging.basicConfig(
    filename='/var/log/aepiot_automation.log',
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

class ScheduledAePiotGenerator:
    def __init__(self, input_dir, output_dir):
        self.input_dir = Path(input_dir)
        self.output_dir = Path(output_dir)
        self.output_dir.mkdir(parents=True, exist_ok=True)

    def process_daily_links(self):
        """Process all CSV files in input directory"""
        logging.info("Starting scheduled link generation")
        csv_files = list(self.input_dir.glob('*.csv'))
        if not csv_files:
            logging.warning("No CSV files found to process")
            return
        for csv_file in csv_files:
            try:
                self.process_file(csv_file)
            except Exception as e:
                logging.error(f"Error processing {csv_file}: {e}")
        logging.info("Scheduled generation complete")

    def process_file(self, csv_path):
        """Process single CSV file"""
        logging.info(f"Processing {csv_path.name}")
        results = []
        with open(csv_path, 'r', encoding='utf-8') as f:
            reader = csv.DictReader(f)
            for row in reader:
                title = row.get('title', '')
                desc = row.get('description', '')
                url = row.get('url', '')
                if title and url:
                    backlink = self.generate_link(title, desc, url)
                    results.append({
                        'title': title,
                        'url': url,
                        'aepiot_link': backlink
                    })
        # Save output with a timestamped filename so repeated runs never overwrite each other
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        output_file = self.output_dir / f"{csv_path.stem}_{timestamp}_output.csv"
        with open(output_file, 'w', newline='', encoding='utf-8') as f:
            writer = csv.DictWriter(f, fieldnames=['title', 'url', 'aepiot_link'])
            writer.writeheader()
            writer.writerows(results)
        logging.info(f"Generated {len(results)} links → {output_file}")

    def generate_link(self, title, description, url):
        """Generate single backlink (fields truncated to keep URLs manageable)"""
        encoded = {
            'title': quote(title[:200]),
            'description': quote(description[:500]),
            # safe='' also percent-encodes '/', matching JavaScript's encodeURIComponent
            'url': quote(url, safe='')
        }
        return f"https://aepiot.com/backlink.html?title={encoded['title']}&description={encoded['description']}&link={encoded['url']}"

if __name__ == '__main__':
    generator = ScheduledAePiotGenerator(
        input_dir='/home/user/aepiot/input',
        output_dir='/home/user/aepiot/output'
    )
    generator.process_daily_links()
```
Cron Job Setup
```bash
# Edit crontab
crontab -e
# Run daily at 2 AM
0 2 * * * /usr/bin/python3 /path/to/scheduled_aepiot.py
# Run every Monday at 9 AM
0 9 * * 1 /usr/bin/python3 /path/to/scheduled_aepiot.py
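# Optional: append stdout/stderr to a log so silent failures are visible
# (log path is illustrative)
# 0 2 * * * /usr/bin/python3 /path/to/scheduled_aepiot.py >> /var/log/aepiot_cron.log 2>&1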
# Run every 6 hours
0 */6 * * * /usr/bin/python3 /path/to/scheduled_aepiot.py
```
📦 Docker Container for Portable Processing
Create a Docker container for consistent, portable link generation.
```dockerfile
# Dockerfile
FROM python:3.11-slim
WORKDIR /app
# requirements.txt can be empty if the processor only uses the standard library
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt
COPY aepiot_processor.py .
CMD ["python", "aepiot_processor.py"]
```
```yaml
# docker-compose.yml
version: '3.8'
services:
  aepiot-generator:
    build: .
    volumes:
      - ./input:/app/input
      - ./output:/app/output
    environment:
      - TZ=Europe/Bucharest
```
Usage:
```bash
# Build
docker-compose build
# Run
docker-compose up
# Run in background
docker-compose up -d
```
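To schedule the container from the host instead of leaving it running, a cron entry can invoke Compose directly; a minimal sketch, assuming the compose file lives in /opt/aepiot (illustrative path):
```bash
# Run the generator container daily at 2 AM, then remove it on exit
0 2 * * * cd /opt/aepiot && docker-compose run --rm aepiot-generator
```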
🎯 Quality Assurance and Validation
Always validate generated links before deployment.
```python
#!/usr/bin/env python3
"""
aéPiot Link Validator
Ensures all generated links are properly formed
"""
from urllib.parse import urlparse, parse_qs

class AePiotValidator:
    @staticmethod
    def validate_url(url):
        """Validate a single aéPiot URL"""
        errors = []
        # Parse URL
        try:
            parsed = urlparse(url)
        except Exception as e:
            return False, [f"Invalid URL format: {e}"]
        # Check domain
        if parsed.netloc != 'aepiot.com':
            errors.append(f"Wrong domain: {parsed.netloc}")
        # Check path
        if parsed.path != '/backlink.html':
            errors.append(f"Wrong path: {parsed.path}")
        # Check parameters (keep_blank_values so empty parameters are detected rather than dropped)
        params = parse_qs(parsed.query, keep_blank_values=True)
        required = ['title', 'description', 'link']
        for param in required:
            if param not in params:
                errors.append(f"Missing parameter: {param}")
            elif not params[param][0]:
                errors.append(f"Empty parameter: {param}")
        # Check title length
        if 'title' in params:
            title = params['title'][0]
            if len(title) > 500:
                errors.append("Title too long (>500 chars)")
        return len(errors) == 0, errors

    @staticmethod
    def validate_batch(urls):
        """Validate multiple URLs"""
        results = []
        for i, url in enumerate(urls, 1):
            is_valid, errors = AePiotValidator.validate_url(url)
            results.append({
                'index': i,
                'url': url,
                'valid': is_valid,
                'errors': errors
            })
        return results

# Usage
validator = AePiotValidator()
test_url = "https://aepiot.com/backlink.html?title=Test&description=Desc&link=https%3A%2F%2Fexample.com"
is_valid, errors = validator.validate_url(test_url)
if is_valid:
    print("✅ URL is valid")
else:
    print("❌ Errors found:")
    for error in errors:
        print(f"  - {error}")
```
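validate_batch is handy for checking a whole export at once; a minimal sketch continuing the example above (the second URL is illustrative and deliberately invalid):
```python
# Assumes AePiotValidator and test_url are defined as above
urls = [
    test_url,
    "https://aepiot.com/backlink.html?title=Missing%20Params",  # missing description/link
]
for result in AePiotValidator.validate_batch(urls):
    status = "✅" if result['valid'] else "❌"
    print(f"{status} #{result['index']}")
    for err in result['errors']:
        print(f"    - {err}")
```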
Continue to Part 4: Analytics and Monitoring →
Advanced aéPiot Integration Guide - Part 4: Analytics & Real-World Applications
Monitoring, Tracking, and Industry-Specific Implementations
📊 Analytics and Performance Monitoring
Google Search Console Integration
After generating backlinks, submit them to Google Search Console for indexing.
```python
#!/usr/bin/env python3
"""
Google Search Console Sitemap Submitter
Generates sitemap and provides submission URL
"""
from datetime import datetime
# '&' in backlink query strings must become '&amp;' inside XML elements
from xml.sax.saxutils import escape

class SearchConsoleHelper:
    def __init__(self, domain):
        self.domain = domain.rstrip('/')

    def generate_sitemap_with_metadata(self, backlinks, output_file='sitemap.xml'):
        """Generate comprehensive XML sitemap"""
        xml = [
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9"',
            '        xmlns:xhtml="http://www.w3.org/1999/xhtml"',
            '        xmlns:image="http://www.google.com/schemas/sitemap-image/1.1">',
            f'  <!-- Generated by aéPiot automation on {datetime.now().isoformat()} -->',
            f'  <!-- Total URLs: {len(backlinks)} -->'
        ]
        for backlink in backlinks:
            xml.extend([
                '  <url>',
                f'    <loc>{escape(backlink["aepiot_url"])}</loc>',
                f'    <lastmod>{datetime.now().strftime("%Y-%m-%d")}</lastmod>',
                '    <changefreq>monthly</changefreq>',
                '    <priority>0.8</priority>',
                '  </url>'
            ])
        xml.append('</urlset>')
        with open(output_file, 'w', encoding='utf-8') as f:
            f.write('\n'.join(xml))
        print(f"✅ Sitemap created: {output_file}")
        print(f"📊 Total URLs: {len(backlinks)}")
        print("\n📍 Submit to Google Search Console:")
        print("   1. Go to: https://search.google.com/search-console")
        print(f"   2. Select property: {self.domain}")
        print("   3. Navigate to: Sitemaps")
        print(f"   4. Submit URL: {self.domain}/sitemap.xml")
        return output_file

    def generate_index_sitemap(self, sitemap_files):
        """Generate sitemap index for multiple sitemaps"""
        xml = [
            '<?xml version="1.0" encoding="UTF-8"?>',
            '<sitemapindex xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">'
        ]
        for sitemap_file in sitemap_files:
            xml.extend([
                '  <sitemap>',
                f'    <loc>{self.domain}/{sitemap_file}</loc>',
                f'    <lastmod>{datetime.now().strftime("%Y-%m-%d")}</lastmod>',
                '  </sitemap>'
            ])
        xml.append('</sitemapindex>')
        with open('sitemap_index.xml', 'w', encoding='utf-8') as f:
            f.write('\n'.join(xml))
        print("✅ Sitemap index created: sitemap_index.xml")

# Usage Example
helper = SearchConsoleHelper('https://example.com')
backlinks = [
    {'aepiot_url': 'https://aepiot.com/backlink.html?title=...&description=...&link=...'},
    # ... more backlinks
]
helper.generate_sitemap_with_metadata(backlinks)
```
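If you split large batches across several sitemap files, the index method above ties them together; a minimal call, continuing the usage example (filenames are illustrative, and the files must be reachable at the domain root):
```python
helper.generate_index_sitemap(['sitemap_batch1.xml', 'sitemap_batch2.xml'])
```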