import java.net.MalformedURLException;
import java.net.URL;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
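/**
 * Simple per-URL request throttler: each URL may be fetched at most once per
 * timeoutMillis; throttled requests are delayed and re-enqueued. Fetch tasks
 * run on a small fixed thread pool, while queue processing happens on the
 * calling thread.
 */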
public class RequestThrottler {
    private final int maxRequestsPerSecond;
    private final int timeoutMillis;
    private final ConcurrentHashMap<URL, Long> lastRequestTimes = new ConcurrentHashMap<>();
    private final ConcurrentLinkedQueue<URL> queue = new ConcurrentLinkedQueue<>();
    private final ExecutorService executor = Executors.newFixedThreadPool(5); // Adjust thread pool size as needed
    public RequestThrottler(int maxRequestsPerSecond, int timeoutMillis) {
        this.maxRequestsPerSecond = maxRequestsPerSecond;
        this.timeoutMillis = timeoutMillis;
    }
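    /** Adds a URL to the queue and immediately drains the queue on the calling thread. */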
    public void enqueueRequest(URL url) {
        queue.offer(url);
        processQueue();
    }
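    /**
     * Drains the queue: allowed requests are handed to the executor; throttled
     * requests make this thread sleep before the URL is re-enqueued.
     */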
    private void processQueue() {
        while (!queue.isEmpty()) {
            URL url = queue.poll();
            if (isAllowedToRequest(url)) {
                executor.submit(fetchHtmlPage(url));
            } else {
                // Request throttled: wait, then re-enqueue and try again
                long waitTime = calculateWaitTime(url);
                try {
                    TimeUnit.MILLISECONDS.sleep(waitTime);
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return; // Stop processing if interrupted
                }
                queue.offer(url);
            }
        }
    }
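    /** Returns true (and records the request time) if the URL has not been requested within the last timeoutMillis. */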
    private boolean isAllowedToRequest(URL url) {
        long lastRequestTime = lastRequestTimes.getOrDefault(url, 0L);
        long currentTime = System.currentTimeMillis();
        if (currentTime - lastRequestTime < timeoutMillis) {
            return false; // Request throttled
        }
        lastRequestTimes.put(url, currentTime);
        return true; // Request allowed
    }
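    /** How long to wait before the given URL may be requested again. */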
    private long calculateWaitTime(URL url) {
        long lastRequestTime = lastRequestTimes.getOrDefault(url, 0L);
        long timeSinceLastRequest = System.currentTimeMillis() - lastRequestTime;
        // Wait for the remainder of the per-URL timeout window used in isAllowedToRequest
        return Math.max(0, timeoutMillis - timeSinceLastRequest);
    }
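    /** Builds a task that simulates downloading the page at the given URL. */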
    private Runnable fetchHtmlPage(URL url) {
        return () -> {
            try {
                // Simulate fetching the HTML page
                System.out.println("Fetching: " + url);
                TimeUnit.SECONDS.sleep(2); // Simulate network latency
                System.out.println("Fetched: " + url);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        };
    }
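    /** Stops accepting new tasks and waits briefly for in-flight fetches to finish. */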
    public void shutdown() {
        executor.shutdown();
        try {
            executor.awaitTermination(5, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
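    /** Demo: enqueues five URLs; the third is identical to the first, so it is delayed by the per-URL timeout. */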
    public static void main(String[] args) throws MalformedURLException {
        RequestThrottler throttler = new RequestThrottler(2, 1000); // 2 requests per second, 1-second per-URL timeout
        URL url1 = new URL("https://www.example.com");
        URL url2 = new URL("https://www.google.com");
        URL url3 = new URL("https://www.example.com"); // Same as url1, so this request is throttled
        throttler.enqueueRequest(url1);
        throttler.enqueueRequest(url2);
        throttler.enqueueRequest(url3);
        throttler.enqueueRequest(new URL("https://www.yahoo.com"));
        throttler.enqueueRequest(new URL("https://www.bing.com"));
        throttler.shutdown();
    }
}