Update meerkat.py
This commit is contained in:
parent 5cec767e15
commit 692861d261

meerkat.py | 90
@@ -9,22 +9,21 @@ def reverse_dns_lookup_sync(ip):
     Perform a reverse DNS lookup (synchronous, for multithreading).
     """
     try:
         print(f"Performing synchronous DNS lookup for IP: {ip}")
         hostname = socket.gethostbyaddr(ip)[0]
         print(f"DNS lookup successful for IP {ip}: {hostname}")
         return hostname
     except socket.herror:
         print(f"DNS lookup failed for IP: {ip}")
         return None


 async def reverse_dns_lookup(ip, executor):
     """
     Perform a reverse DNS lookup using a thread pool.
     """
-    print(f"Performing DNS lookup for IP: {ip}")
+    print(f"Starting DNS lookup for IP: {ip}")
     loop = asyncio.get_event_loop()
     hostname = await loop.run_in_executor(executor, reverse_dns_lookup_sync, ip)
     if hostname:
         print(f"DNS lookup successful for IP {ip}: {hostname}")
     else:
         print(f"DNS lookup failed for IP: {ip}")
     return hostname


 async def fetch_website_info(session, ip, hostname):
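Aside, not part of the diff: a minimal, self-contained sketch of the run_in_executor pattern the hunk above relies on. The helper name, pool size, and IP are illustrative assumptions.

import asyncio
import socket
from concurrent.futures import ThreadPoolExecutor

def lookup_sync(ip):
    # socket.gethostbyaddr blocks, so it runs in a worker thread.
    try:
        return socket.gethostbyaddr(ip)[0]
    except socket.herror:
        return None

async def demo():
    executor = ThreadPoolExecutor(max_workers=4)  # illustrative pool size
    loop = asyncio.get_event_loop()
    # Offload the blocking lookup so the event loop stays responsive.
    hostname = await loop.run_in_executor(executor, lookup_sync, "8.8.8.8")
    print(hostname)  # e.g. "dns.google"

asyncio.run(demo())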
@@ -48,43 +47,92 @@ async def fetch_website_info(session, ip, hostname):
         if '<meta name="description"' in html:
             desc_split = html.split('<meta name="description"')[1]
             description = desc_split.split('content="')[1].split('"')[0]
         print(f"Extracted title: {title}, description: {description}")
         return {"title": title, "description": description}
     except Exception as e:
         print(f"Failed to fetch from {url}: {e}")
         continue
     print(f"No valid website info found for IP: {ip}")
     return {"title": "No Title", "description": "No Description"}


-async def analyze_ip(ip, session, executor, results):
+async def analyze_ip(ip, session, executor):
     """
-    Analyze a single IP address to fetch hostname and website info.
+    Analyze a single IP address: reverse DNS and fetch website info.
     """
     print(f"Analyzing IP: {ip}")
     hostname = await reverse_dns_lookup(ip, executor)
     website_info = await fetch_website_info(session, ip, hostname)
     result = {"ip": ip, "hostname": hostname or "Unknown", **website_info}
-    results.append(result)
     print(f"Analysis complete for IP: {ip}")
-
-    # Save results to file after each IP is analyzed
-    with open("output.json", "w") as file:
-        json.dump(results, file, indent=4)
-    print("Progress saved to 'output.json'")
+    return result
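Aside, not part of the diff: the split()-based scraping in fetch_website_info breaks on reordered attributes or single-quoted values. A sketch of the same title/description extraction using the standard library's HTMLParser, shown only as an illustration:

from html.parser import HTMLParser

class MetaExtractor(HTMLParser):
    def __init__(self):
        super().__init__()
        self.in_title = False
        self.title = "No Title"
        self.description = "No Description"

    def handle_starttag(self, tag, attrs):
        if tag == "title":
            self.in_title = True
        elif tag == "meta":
            attrs = dict(attrs)
            if attrs.get("name") == "description":
                self.description = attrs.get("content", self.description)

    def handle_data(self, data):
        if self.in_title:
            self.title = data.strip()
            self.in_title = False

parser = MetaExtractor()
parser.feed('<title>Example</title><meta name="description" content="Demo page">')
print(parser.title, parser.description)  # Example Demo page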
 async def main():
+    # Load the previous results and last scanned IP
+    try:
+        with open("output.json", "r") as file:
+            data = json.load(file)
+            if isinstance(data, dict):  # Check if the loaded data is a dictionary
+                results = data.get("results", [])
+                last_scanned_ip = data.get("last_scanned_ip", None)
+            else:  # In case it's a list or incorrectly formatted
+                results = []
+                last_scanned_ip = None
+    except (FileNotFoundError, json.JSONDecodeError):
+        results = []
+        last_scanned_ip = None
+
     # Load IPs from the JSON file
     with open("ips_up.json", "r") as file:
         ips = json.load(file)

-    results = []  # Store results incrementally
-    executor = ThreadPoolExecutor()  # ThreadPool for DNS lookups
+    # If there's a last scanned IP, find the index of the next IP to scan
+    start_index = 0
+    if last_scanned_ip:
+        try:
+            start_index = ips.index(last_scanned_ip) + 1
+            print(f"Resuming from IP: {last_scanned_ip}")
+        except ValueError:
+            print(f"Last scanned IP {last_scanned_ip} not found in the list, starting from the beginning.")

     print(f"Loaded {len(ips)} IPs for analysis.")

+    # Limit the number of threads for DNS lookups
+    max_threads = 20  # Adjust based on your system's resources
+    executor = ThreadPoolExecutor(max_threads)
+
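Aside, not part of the diff: how the resume marker above maps to a slice of the IP list, with illustrative data.

ips = ["10.0.0.1", "10.0.0.2", "10.0.0.3"]   # stand-in for ips_up.json
last_scanned_ip = "10.0.0.2"                  # stand-in for the saved marker
start_index = ips.index(last_scanned_ip) + 1 if last_scanned_ip in ips else 0
print(ips[start_index:])                      # ['10.0.0.3'] -- only unscanned IPs remain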
     # Use a single aiohttp session for all requests
     async with ClientSession() as session:
-        tasks = []
-        for ip in ips:
-            print(f"Starting analysis for IP: {ip}")
-            tasks.append(analyze_ip(ip, session, executor, results))
-        await asyncio.gather(*tasks)
+        tasks = [
+            analyze_ip(ip, session, executor) for ip in ips[start_index:]
+        ]
+        # Run tasks concurrently
+        try:
+            batch_size = 10  # Write to file every 10 IPs
+            batch_count = 0
+            for coro in asyncio.as_completed(tasks):
+                result = await coro
+                results.append(result)
+                print(f"Result: {result}")
+                batch_count += 1
+                if batch_count >= batch_size:
+                    # Save progress to file after every batch of 10 results
+                    with open("output.json", "w") as file:
+                        json.dump({"results": results, "last_scanned_ip": result["ip"]}, file, indent=4)
+                    print(f"Progress saved to 'output.json'. Last scanned IP: {result['ip']}")
+                    batch_count = 0  # Reset the batch count
+
+            # Final progress save
+            with open("output.json", "w") as file:
+                json.dump({"results": results, "last_scanned_ip": results[-1]["ip"]}, file, indent=4)
+            print("Final progress saved to 'output.json'.")
+        except KeyboardInterrupt:
+            print("\nProcess interrupted. Saving current progress...")
+            with open("output.json", "w") as file:
+                json.dump({"results": results, "last_scanned_ip": ips[start_index - 1]}, file, indent=4)
+            print("Progress saved successfully. Exiting gracefully.")
+            return

     print("Analysis complete. Final results saved to 'output.json'.")


 if __name__ == "__main__":
     asyncio.run(main())
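Aside, not part of the diff: a self-contained sketch of the as_completed batching pattern the commit adopts; the coroutine body, filename, and batch size are illustrative.

import asyncio
import json

async def analyze(ip):
    await asyncio.sleep(0.01)      # stand-in for the real per-IP analysis
    return {"ip": ip}

async def run(ips, batch_size=10):
    results = []
    tasks = [analyze(ip) for ip in ips]
    # as_completed yields results as they finish, so progress can be
    # checkpointed without waiting for the whole run.
    for i, coro in enumerate(asyncio.as_completed(tasks), start=1):
        result = await coro
        results.append(result)
        if i % batch_size == 0:
            # Same checkpoint shape as the commit: results plus a resume marker.
            with open("checkpoint.json", "w") as f:
                json.dump({"results": results, "last_scanned_ip": result["ip"]}, f, indent=4)
    return results

asyncio.run(run([f"10.0.0.{n}" for n in range(25)]))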