commit 5f25766739
Author: 蒋小陌
Date: 2024-08-30 09:36:55 +08:00

4 changed files with 355 additions and 0 deletions

.gitignore (vendored, normal file, 24 lines added)

@@ -0,0 +1,24 @@
.DS_Store
/node_modules
/backup
# local env files
.env.local
.env.*.local
# Log files
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
# Editor directories and files
.idea
.vscode
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?
package-lock.json
pnpm-lock.yaml

index.js (normal file, 133 lines added)

@@ -0,0 +1,133 @@
const http = require('http');
const https = require('https');
const url = require('url');
const querystring = require('querystring');

// API that resolves a storage path (plus signature) into a real download URL.
const apiEndpoint = process.env.url || 'https://oss.x-php.com/alist/link';
const requestTimeout = 10000; // 10 seconds
const cache = {};

// Get port from environment variable or default to 3000
const PORT = process.env.PORT || 3000;

const server = http.createServer((req, res) => {
  if (req.url === '/favicon.ico') {
    res.writeHead(204);
    res.end();
    return;
  }

  const parsedUrl = url.parse(req.url, true);
  const path = parsedUrl.pathname;
  const sign = parsedUrl.query.sign || '';

  // Check if the data is in cache and not expired
  const cacheEntry = cache[path];
  if (cacheEntry && cacheEntry.expiration > Date.now()) {
    // Opportunistically purge every expired cache entry
    Object.keys(cache).forEach(key => {
      if (cache[key].expiration < Date.now()) {
        delete cache[key];
      }
    });
    serveFromCache(cacheEntry, res);
    return;
  } else {
    delete cache[path]; // Remove expired cache entry if it exists
  }

  // Construct the POST data
  const postData = querystring.stringify({ path, sign });

  // Request the real URL from the API
  const apiReq = https.request(apiEndpoint, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/x-www-form-urlencoded',
      'Accept': 'application/json',
      'Content-Length': Buffer.byteLength(postData),
      'sign': sign
    },
    timeout: requestTimeout
  }, (apiRes) => {
    let data = '';
    apiRes.on('data', chunk => data += chunk);
    apiRes.on('end', () => {
      try {
        const apiData = JSON.parse(data);
        if (apiData.code === 200 && apiData.data && apiData.data.url) {
          const { url: realUrl, cloudtype, expiration } = apiData.data;
          // Cache the response if expiration is greater than 0
          if (expiration > 0) {
            cache[path] = {
              realUrl,
              cloudtype,
              expiration: Date.now() + expiration * 1000
            };
          }
          fetchAndServe(realUrl, cloudtype, res);
        } else {
          res.writeHead(502, { 'Content-Type': 'text/plain' });
          res.end(apiData.message || 'Bad Gateway');
        }
      } catch (error) {
        res.writeHead(502, { 'Content-Type': 'text/plain' });
        res.end('Bad Gateway: Failed to decode JSON');
      }
    });
  });

  // The `timeout` option only emits a 'timeout' event; destroy the request so
  // the 'error' handler below actually fires with ETIMEDOUT.
  apiReq.on('timeout', () => {
    const err = new Error('Request timed out');
    err.code = 'ETIMEDOUT';
    apiReq.destroy(err);
  });

  apiReq.on('error', (e) => {
    if (e.code === 'ETIMEDOUT') {
      res.writeHead(504, { 'Content-Type': 'text/plain' });
      res.end('Gateway Timeout');
    } else {
      res.writeHead(500, { 'Content-Type': 'text/plain' });
      res.end('Internal Server Error');
    }
  });

  apiReq.write(postData);
  apiReq.end();
});

// Stream the resolved URL back to the client, tagging the response with the
// storage provider via a `cloudtype` header.
function fetchAndServe(realUrl, cloudtype, res) {
  const realReq = https.get(realUrl, { timeout: requestTimeout * 10 }, (realRes) => {
    res.writeHead(realRes.statusCode, {
      ...realRes.headers,
      'cloudtype': cloudtype
    });
    realRes.pipe(res);
  });
  realReq.on('error', (e) => {
    // Headers may already be gone out if the upstream response was mid-pipe.
    if (!res.headersSent) {
      res.writeHead(502, { 'Content-Type': 'text/plain' });
    }
    res.end(`Bad Gateway: ${realUrl}`);
  });
}

function serveFromCache(cacheEntry, res) {
  fetchAndServe(cacheEntry.realUrl, cacheEntry.cloudtype, res);
}

server.listen(PORT, () => {
  console.log(`Proxy server is running on http://localhost:${PORT}`);
});

// Graceful shutdown
process.on('SIGINT', () => {
  console.log('Received SIGINT. Shutting down gracefully...');
  server.close(() => {
    console.log('Server closed.');
    process.exit(0);
  });
  // Force shutdown after 10 seconds if not closed
  setTimeout(() => {
    console.error('Forcing shutdown...');
    process.exit(1);
  }, 10000);
});
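
For reference, the resolver response that index.js depends on boils down to a few fields. The sketch below is inferred from the parsing code above, not taken from any documentation of the oss.x-php.com service, and the concrete values are placeholders:

// Hypothetical shape of the JSON that index.js expects from apiEndpoint.
// Field names mirror the parsing code above; the values are made up.
const exampleApiResponse = {
  code: 200,            // anything other than 200 is relayed to the client as a 502
  message: 'ok',        // used as the error body when code !== 200
  data: {
    url: 'https://cdn.example.com/file.bin', // resolved download URL (placeholder)
    cloudtype: 'example-cloud',              // echoed to the client as a `cloudtype` header
    expiration: 300                          // seconds to cache the mapping; 0 disables caching
  }
};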

index.py (normal file, 112 lines added)

@@ -0,0 +1,112 @@
import http.server
import urllib.request
import urllib.parse
import urllib.error
import json
import ssl
import time


class ProxyHTTPRequestHandler(http.server.BaseHTTPRequestHandler):
    api_endpoint = 'https://oss.x-php.com/alist/link'
    request_timeout = 10  # 10 seconds
    cache = {}  # class-level cache shared by all handler instances

    def do_GET(self):
        self.proxy_request()

    def do_POST(self):
        self.proxy_request()

    def proxy_request(self):
        path = self.path

        # Filter out favicon.ico requests
        if path == '/favicon.ico':
            self.send_response(204)
            self.end_headers()
            return

        sign = self.headers.get('sign', '')

        # Check if the data is in cache and not expired
        cache_entry = self.cache.get(path)
        if cache_entry and cache_entry['expiration'] > time.time():
            self.serve_from_cache(cache_entry)
            return
        else:
            self.cache.pop(path, None)  # Remove expired cache entry if it exists

        # Construct the POST data
        post_data = urllib.parse.urlencode({'path': path, 'sign': sign}).encode('utf-8')

        try:
            # Request the real URL from the API (certificate verification is disabled)
            context = ssl._create_unverified_context()
            req = urllib.request.Request(self.api_endpoint, data=post_data, method='POST')
            req.add_header('Accept', 'application/json')
            with urllib.request.urlopen(req, timeout=self.request_timeout, context=context) as response:
                api_response = response.read().decode('utf-8')

            # Ensure the response is JSON
            try:
                api_data = json.loads(api_response)
            except json.JSONDecodeError:
                self.send_error(502, 'Bad Gateway: Failed to decode JSON')
                return

            if isinstance(api_data, dict) and api_data.get('code') == 200 and api_data.get('data') and api_data['data'].get('url'):
                real_url = api_data['data']['url']
                cloud_type = api_data['data']['cloudtype']
                expiration = int(api_data['data'].get('expiration', 0))  # Convert expiration to int

                # Cache the response if expiration is greater than 0
                if expiration > 0:
                    self.cache[path] = {
                        'real_url': real_url,
                        'cloud_type': cloud_type,
                        'expiration': time.time() + expiration
                    }

                self.fetch_and_serve(real_url, cloud_type)
            else:
                self.send_error(502, api_data.get('message', 'Bad Gateway'))
        except urllib.error.URLError as api_error:
            if isinstance(api_error.reason, TimeoutError) or 'timed out' in str(api_error.reason):
                self.send_error(504, 'Gateway Timeout')
            else:
                self.send_error(500, 'Internal Server Error')

    def fetch_and_serve(self, real_url, cloud_type):
        try:
            context = ssl._create_unverified_context()
            with urllib.request.urlopen(real_url, timeout=self.request_timeout, context=context) as real_response:
                self.send_response(real_response.status)
                for key, value in real_response.getheaders():
                    self.send_header(key, value)
                self.send_header('cloudtype', cloud_type)
                self.end_headers()
                self.wfile.write(real_response.read())
        except ConnectionResetError:
            print(f"Connection reset by peer when fetching {real_url}")
        except BrokenPipeError:
            print(f"Broken pipe when serving {real_url}")
        except Exception:
            self.send_error(502, f'Bad Gateway: {real_url}')

    def serve_from_cache(self, cache_entry):
        real_url = cache_entry['real_url']
        cloud_type = cache_entry['cloud_type']
        self.fetch_and_serve(real_url, cloud_type)


def run(server_class=http.server.HTTPServer, handler_class=ProxyHTTPRequestHandler, port=3000):
    server_address = ('', port)
    httpd = server_class(server_address, handler_class)
    print(f"Proxy server is running on http://localhost:{port}")
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("\nServer is shutting down...")
        httpd.server_close()


if __name__ == '__main__':
    run()
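
One behavioral difference between the two implementations is where the signature comes from: index.js reads it from the `sign` query parameter, while index.py reads it from a `sign` request header. A minimal client-side sketch, assuming Node 18+ (global fetch) and placeholder path/signature values:

// Hypothetical client calls; the path and signature below are placeholders.
const path = '/demo/video.mp4';
const sign = 'example-signature';

async function main() {
  // index.js variant: the signature travels in the query string.
  await fetch(`http://localhost:3000${path}?sign=${encodeURIComponent(sign)}`);
  // index.py variant: the signature travels in a request header.
  await fetch(`http://localhost:3000${path}`, { headers: { sign } });
}

main();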

torrents.js (normal file, 86 lines added)

@@ -0,0 +1,86 @@
const http = require('http');
const https = require('https');
const url = require('url');

const PORT = 3000;
const MAX_RETRIES = 3;
const INITIAL_TIMEOUT = 3000; // initial timeout of 3 seconds
const BACKOFF_FACTOR = 2; // exponential backoff factor

const server = http.createServer((req, res) => {
  const reqUrl = url.parse(req.url, true);
  const id = reqUrl.pathname.split('/').pop();

  // Make sure the ID exists and is numeric
  if (!id || isNaN(id)) {
    res.writeHead(400, { 'Content-Type': 'text/plain' });
    res.end('Invalid ID');
    return;
  }

  const targetUrl = `https://xxxclub.to/torrents/details/${id}`;
  console.log(`Target URL: ${targetUrl}`);

  let responseSent = false;

  const makeRequest = (retryCount = 0, timeout = INITIAL_TIMEOUT) => {
    if (responseSent) return;

    const options = url.parse(targetUrl);
    options.method = 'GET';
    options.timeout = timeout;

    let timedOut = false;

    const proxyReq = https.request(options, (proxyRes) => {
      let data = '';
      proxyRes.on('data', (chunk) => {
        data += chunk;
      });
      proxyRes.on('end', () => {
        if (!responseSent) {
          res.writeHead(proxyRes.statusCode, proxyRes.headers);
          res.end(data);
          responseSent = true;
        }
      });
    });

    proxyReq.on('timeout', () => {
      console.error('Request timed out.');
      timedOut = true;
      proxyReq.destroy(); // also triggers 'error', which is ignored below
      if (retryCount < MAX_RETRIES) {
        const newTimeout = timeout * BACKOFF_FACTOR;
        console.log(`Retrying... (${retryCount + 1}/${MAX_RETRIES}) with timeout ${newTimeout}ms`);
        makeRequest(retryCount + 1, newTimeout);
      } else if (!responseSent) {
        res.writeHead(504, { 'Content-Type': 'text/plain' });
        res.end('Request timed out.');
        responseSent = true;
      }
    });

    proxyReq.on('error', (e) => {
      if (timedOut) return; // retry was already scheduled by the timeout handler
      console.error(`Problem with request: ${e.message}`);
      if (retryCount < MAX_RETRIES) {
        const newTimeout = timeout * BACKOFF_FACTOR;
        console.log(`Retrying... (${retryCount + 1}/${MAX_RETRIES}) with timeout ${newTimeout}ms`);
        makeRequest(retryCount + 1, newTimeout);
      } else if (!responseSent) {
        res.writeHead(500, { 'Content-Type': 'text/plain' });
        res.end('Error occurred while fetching the data.');
        responseSent = true;
      }
    });

    proxyReq.end();
  };

  makeRequest();
});

server.listen(PORT, () => {
  console.log(`Proxy server is running on http://localhost:${PORT}`);
});
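
The retry loop in torrents.js does not sleep between attempts; the exponential backoff is applied to the per-attempt timeout instead. A small worked example of the schedule those constants produce (the values mirror the ones defined above):

// Reproduces the timeout schedule makeRequest() walks through before giving up.
const INITIAL_TIMEOUT = 3000;
const BACKOFF_FACTOR = 2;
const MAX_RETRIES = 3;
let timeout = INITIAL_TIMEOUT;
for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
  console.log(`attempt ${attempt + 1}: timeout ${timeout} ms`); // 3000, 6000, 12000, 24000
  timeout *= BACKOFF_FACTOR;
}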