# twitter-checker/main.py
# Snapshot retrieved 2026-01-21 04:25 +02:00 — 1081 lines, 47 KiB, Python.
# NOTE: the hosting page warned about "ambiguous Unicode characters"; the
# box-drawing banner art and status glyphs below are intentional.
#!/usr/bin/env python3
import sys
import asyncio
import json
from pathlib import Path
from datetime import datetime
from file_utils import scan_cookies_directory, save_account_info_token
from proxy_handler import read_proxy_file
from account_checker import check_account, check_account_by_token
from auth_token_handler import scan_auth_tokens_directory, read_auth_tokens_from_file
import debug_logger as log
CONFIG_FILE = Path("config.json")


def load_config():
    """Return the effective configuration dictionary.

    Starts from the built-in defaults and overlays any keys found in
    ``config.json`` in the working directory.  A missing, unreadable or
    malformed config file is ignored and the defaults are returned, so a
    corrupt config can never prevent startup.
    """
    default_config = {
        'cookies_dir': 'cookies',
        'output_dir': 'results',
        'cookies_only_mode': False,
        'threads': 1,
        'use_proxy': True,
        'auth_mode': 'cookies',
        'save_format': 'auth_token',
        'filter_enabled': False,
        'filter_min_followers': 0,
        'filter_max_followers': 0,
        'filter_min_posts': 0,
        'filter_min_likes': 0,
        'filter_verified_only': False,
        'filtered_output_dir': 'filtered'
    }
    if CONFIG_FILE.exists():
        try:
            with open(CONFIG_FILE, 'r') as f:
                loaded = json.load(f)
            default_config.update(loaded)
        except (OSError, ValueError, TypeError):
            # ValueError covers json.JSONDecodeError; TypeError covers a
            # config whose top level is not a mapping.  Deliberately narrow
            # instead of a bare `except:` that would also hide e.g.
            # KeyboardInterrupt.
            pass
    return default_config
def save_config(cfg):
    """Persist *cfg* to ``config.json`` as pretty-printed JSON.

    Failures (read-only directory, unserializable value) are deliberately
    swallowed so saving settings can never crash the interactive UI, but the
    exception set is kept narrow instead of a bare ``except:``.
    """
    try:
        with open(CONFIG_FILE, 'w') as f:
            json.dump(cfg, f, indent=2)
    except (OSError, TypeError, ValueError):
        pass
# Shared, module-level configuration; the menu handlers mutate it in place
# and persist changes via save_config().
config = load_config()
def print_banner():
    """Clear the terminal and print the program banner with contact info."""
    bar = "=" * 70
    art = (
        "██╗ ██╗ ██████╗██╗ ██╗███████╗ ██████╗██╗ ██╗███████╗██████╗ ",
        "╚██╗██╔╝ ██╔════╝██║ ██║██╔════╝██╔════╝██║ ██╔╝██╔════╝██╔══██╗",
        " ╚███╔╝ ██║ ███████║█████╗ ██║ █████╔╝ █████╗ ██████╔╝",
        " ██╔██╗ ██║ ██╔══██║██╔══╝ ██║ ██╔═██╗ ██╔══╝ ██╔══██╗",
        "██╔╝ ██╗ ╚██████╗██║ ██║███████╗╚██████╗██║ ██╗███████╗██║ ██║",
        "╚═╝ ╚═╝ ╚═════╝╚═╝ ╚═╝╚══════╝ ╚═════╝╚═╝ ╚═╝╚══════╝╚═╝ ╚═╝",
    )
    # \033[2J clears the screen, \033[H homes the cursor.
    print("\033[2J\033[H", end='')
    print("\033[1;36m" + bar)
    for row in art:
        print(row)
    print(bar)
    print("\033[1;33m X/Twitter cookies Checker")
    print("\033[1;35m Contact: @CrystalStud for support/buy")
    print("\033[1;36m" + bar + "\033[0m")
    print()
def print_menu():
    """Print the numbered main-menu options."""
    print("\n\033[1;32m┌─────────────────────────────────────┐")
    print("│ MAIN MENU │")
    print("└─────────────────────────────────────┘\033[0m\n")
    entries = (
        ("1", "Check Accounts"),
        ("2", "Filter Accounts"),
        ("3", "Extract Auth Tokens"),
        ("4", "Convert Cookie Format"),
        ("5", "Extract X.com Cookies from Logs"),
        ("6", "Settings"),
        ("7", "About"),
        ("0", "Exit"),
    )
    for key, label in entries:
        print(f"\033[1;37m[{key}]\033[0m {label}")
    print()
def get_input(prompt, default=None):
    """Prompt the user for a line of input.

    When *default* is truthy it is shown in the prompt and returned if the
    user just presses Enter; otherwise the stripped reply is returned as-is.
    """
    if not default:
        return input(f"\033[1;33m{prompt}\033[0m: ").strip()
    reply = input(f"\033[1;33m{prompt}\033[0m [\033[1;36m{default}\033[0m]: ").strip()
    return reply or default
def print_success(msg):
    """Print *msg* behind a green check mark."""
    tick = "\033[1;32m✓\033[0m"
    print(f"{tick} {msg}")
def print_error(msg):
    """Print *msg* behind a red cross mark."""
    cross = "\033[1;31m✗\033[0m"
    print(f"{cross} {msg}")
def print_info(msg):
    """Print an informational message with a blue colour prefix.

    NOTE(review): there is no glyph between the colour-on/colour-off escape
    codes — presumably an info symbol was lost upstream; confirm against the
    original source before "fixing".
    """
    prefix = "\033[1;34m\033[0m"
    print(f"{prefix} {msg}")
async def process_batch(batch, proxies, output_dir, semaphore, cookies_only_mode, results, user_data_list=None):
    """Check every cookie file in *batch* concurrently.

    Appends a ``(success, verified)`` pair to the shared *results* list for
    each file, in batch order; the message element of each result is dropped.
    """
    coros = [
        check_account(cookie_file, proxies, output_dir, semaphore, cookies_only_mode, user_data_list)
        for cookie_file in batch
    ]
    for ok, is_verified, _msg in await asyncio.gather(*coros):
        results.append((ok, is_verified))
async def process_batch_tokens(batch, proxies, output_dir, semaphore, cookies_only_mode, results, user_data_list=None):
    """Check every ``(file, line_no, token)`` triple in *batch* concurrently.

    Appends a ``(success, verified)`` pair to the shared *results* list for
    each token, in batch order; the message element of each result is dropped.
    """
    coros = [
        check_account_by_token(tok, src_file, line_no, proxies, output_dir, semaphore, cookies_only_mode, user_data_list)
        for src_file, line_no, tok in batch
    ]
    for ok, is_verified, _msg in await asyncio.gather(*coros):
        results.append((ok, is_verified))
async def run_checker(cookies_path, proxy_file=None):
    """Scan *cookies_path* for cookie files, check every account concurrently,
    draw a live progress line and write a timestamped summary file.

    proxy_file: optional path to a proxy list; failure to load it is
    non-fatal and checking continues without proxies.
    """
    log.info("=" * 80)
    log.info("Starting checker run")
    log.info(f"Cookies path: {cookies_path}")
    log.info(f"Proxy file: {proxy_file}")
    log.info(f"Config: {config}")
    log.info("=" * 80)
    start_time = datetime.now()
    output_dir = Path(config['output_dir'])
    output_dir.mkdir(exist_ok=True)
    try:
        print_info(f"Scanning for cookies in: {cookies_path}")
        log.debug(f"Scanning directory: {cookies_path}")
        cookie_files = scan_cookies_directory(cookies_path)
        log.info(f"Found {len(cookie_files) if cookie_files else 0} cookie files")
        if not cookie_files:
            print_error("No cookie files found (.json or .txt)")
            return
        print_success(f"Found {len(cookie_files)} cookie file(s)")
        proxies = []
        if proxy_file:
            print_info(f"Loading proxies from: {proxy_file}")
            try:
                proxies = read_proxy_file(proxy_file)
                if not proxies:
                    print_error("No valid proxies found, continuing without proxy")
            except:
                # Best-effort: a bad/missing proxy file must not abort the run.
                print_error("Failed to load proxies, continuing without proxy")
        print()
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print("\033[1;37m CHECKING ACCOUNTS\033[0m")
        print(f"\033[1;35m Threads: {config['threads']}\033[0m")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print()
        # Split the work into one batch per configured "thread"; all batches
        # additionally share a semaphore that limits concurrency to 5.
        num_threads = config['threads']
        concurrent_per_thread = 5
        batch_size = len(cookie_files) // num_threads
        batches = []
        for i in range(num_threads):
            start_idx = i * batch_size
            if i == num_threads - 1:
                # Last batch absorbs the division remainder.
                end_idx = len(cookie_files)
            else:
                end_idx = (i + 1) * batch_size
            batches.append(cookie_files[start_idx:end_idx])
        semaphore = asyncio.Semaphore(concurrent_per_thread)
        results = []
        user_data_list = []
        check_start_time = datetime.now()
        total_checked = 0

        async def monitor_progress():
            # Polls the shared *results* list every 0.3s and redraws the
            # progress line with counts and an ETA; exits once all files
            # are accounted for.
            nonlocal total_checked
            while total_checked < len(cookie_files):
                await asyncio.sleep(0.3)
                checked = len(results)
                if checked != total_checked:
                    total_checked = checked
                    successful = sum(1 for s, v in results if s)
                    failed = sum(1 for s, v in results if not s)
                    verified_count = sum(1 for s, v in results if s and v)
                    remaining = len(cookie_files) - total_checked
                    if total_checked > 0 and remaining > 0:
                        elapsed = (datetime.now() - check_start_time).total_seconds()
                        avg_time = elapsed / total_checked
                        eta_seconds = avg_time * remaining
                        eta_str = f"{int(eta_seconds // 60)}m {int(eta_seconds % 60)}s"
                    else:
                        eta_str = "calculating..."
                    # NOTE(review): the bar glyphs here are empty strings —
                    # presumably block characters lost in transit; confirm
                    # against the original source.
                    progress_bar = "" * (total_checked * 40 // len(cookie_files)) + "" * (40 - total_checked * 40 // len(cookie_files))
                    print(f"\r\033[1;36m[{progress_bar}]\033[0m {total_checked}/{len(cookie_files)} | \033[1;32m✓{successful}\033[0m | \033[1;31m✗{failed}\033[0m | \033[1;33m★{verified_count}\033[0m | ETA: {eta_str} ", end='', flush=True)
        monitor_task = asyncio.create_task(monitor_progress())
        batch_tasks = []
        for batch in batches:
            task = process_batch(batch, proxies, output_dir, semaphore, config['cookies_only_mode'], results, user_data_list)
            batch_tasks.append(task)
        # NOTE(review): if gather() raises, monitor_task is never awaited or
        # cancelled and leaks until loop shutdown.
        await asyncio.gather(*batch_tasks)
        await monitor_task
        successful = sum(1 for s, v in results if s)
        failed = sum(1 for s, v in results if not s)
        verified_count = sum(1 for s, v in results if s and v)
        print()
        print()
        end_time = datetime.now()
        elapsed_time = end_time - start_time
        elapsed_seconds = elapsed_time.total_seconds()
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print("\033[1;37m SUMMARY\033[0m")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print(f"\033[1;37mTotal accounts:\033[0m {len(cookie_files)}")
        print(f"\033[1;32m✓ Successful:\033[0m {successful}")
        print(f"\033[1;31m✗ Failed:\033[0m {failed}")
        print(f"\033[1;33m★ Verified:\033[0m {verified_count}")
        print(f"\033[1;35m⏱ Time elapsed:\033[0m {elapsed_seconds:.2f}s")
        print(f"\033[1;34m📁 Results saved to:\033[0m {output_dir.absolute()}/")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        # Write a plain-text summary next to the per-account result files.
        summary_file = output_dir / f"summary_{datetime.now().strftime('%Y%m%d_%H%M%S')}.txt"
        summary_lines = [
            "=" * 80,
            "X Checker, contact @CrystalStud for support",
            "=" * 80,
            f"Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
            "",
            "Results:",
            f" Total accounts checked: {len(cookie_files)}",
            f" Valid accounts: {successful}",
            f" Invalid accounts: {failed}",
            f" Verified accounts: {verified_count}",
            "",
            "Performance:",
            f" Time elapsed: {elapsed_seconds:.2f}s",
            f" Average per account: {elapsed_seconds/len(cookie_files):.2f}s",
            "",
            "Configuration:",
            f" Proxies loaded: {len(proxies) if proxies else 0}",
            f" Cookie files: {len(cookie_files)}",
            "",
            "=" * 80,
        ]
        summary_file.write_text('\n'.join(summary_lines), encoding='utf-8')
        print_success(f"Summary saved to: {summary_file.name}")
        try:
            # Best-effort cleanup of the temporary session file.
            session_file = Path("temp_session.tw_session")
            if session_file.exists():
                session_file.unlink()
        except:
            pass
    except FileNotFoundError as e:
        print_error(f"Error: {e}")
    except Exception as e:
        print_error(f"Unexpected error: {e}")
async def run_checker_tokens(tokens_path, proxy_file=None):
    """Token-mode counterpart of run_checker(): scan *tokens_path* for .txt
    files of auth tokens, validate each one concurrently, draw a live
    progress line and write a timestamped summary file."""
    start_time = datetime.now()
    output_dir = Path(config['output_dir'])
    output_dir.mkdir(exist_ok=True)
    try:
        print_info(f"Scanning for auth tokens in: {tokens_path}")
        token_files = scan_auth_tokens_directory(tokens_path)
        if not token_files:
            print_error("No token files found (.txt)")
            return
        print_success(f"Found {len(token_files)} token file(s)")
        # Flatten every file into (file, line_number, token) triples so the
        # origin of each token is preserved for reporting.
        all_tokens = []
        for token_file in token_files:
            tokens = read_auth_tokens_from_file(token_file)
            for line_num, token in tokens:
                all_tokens.append((token_file, line_num, token))
        if not all_tokens:
            print_error("No valid auth tokens found in files")
            return
        print_success(f"Found {len(all_tokens)} auth token(s)")
        proxies = []
        if proxy_file:
            print_info(f"Loading proxies from: {proxy_file}")
            try:
                proxies = read_proxy_file(proxy_file)
                if not proxies:
                    print_error("No valid proxies found, continuing without proxy")
            except:
                # Best-effort: continue without proxies on any load failure.
                print_error("Failed to load proxies, continuing without proxy")
        print()
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print("\033[1;37m CHECKING ACCOUNTS (AUTH TOKEN MODE)\033[0m")
        print(f"\033[1;35m Threads: {config['threads']}\033[0m")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print()
        # One batch per configured "thread"; all batches share a semaphore
        # limiting concurrency to 5.
        num_threads = config['threads']
        concurrent_per_thread = 5
        batch_size = len(all_tokens) // num_threads
        batches = []
        for i in range(num_threads):
            start_idx = i * batch_size
            if i == num_threads - 1:
                # Last batch absorbs the division remainder.
                end_idx = len(all_tokens)
            else:
                end_idx = (i + 1) * batch_size
            batches.append(all_tokens[start_idx:end_idx])
        semaphore = asyncio.Semaphore(concurrent_per_thread)
        results = []
        user_data_list = []
        check_start_time = datetime.now()
        total_checked = 0

        async def monitor_progress():
            # Polls the shared *results* list every 0.3s and redraws the
            # progress line; exits once every token is accounted for.
            nonlocal total_checked
            while total_checked < len(all_tokens):
                await asyncio.sleep(0.3)
                checked = len(results)
                if checked != total_checked:
                    total_checked = checked
                    successful = sum(1 for s, v in results if s)
                    failed = sum(1 for s, v in results if not s)
                    verified_count = sum(1 for s, v in results if s and v)
                    remaining = len(all_tokens) - total_checked
                    if total_checked > 0 and remaining > 0:
                        elapsed = (datetime.now() - check_start_time).total_seconds()
                        avg_time = elapsed / total_checked
                        eta_seconds = avg_time * remaining
                        eta_str = f"{int(eta_seconds // 60)}m {int(eta_seconds % 60)}s"
                    else:
                        eta_str = "calculating..."
                    # NOTE(review): bar glyphs are empty strings here —
                    # presumably block characters lost in transit.
                    progress_bar = "" * (total_checked * 40 // len(all_tokens)) + "" * (40 - total_checked * 40 // len(all_tokens))
                    print(f"\r\033[1;36m[{progress_bar}]\033[0m {total_checked}/{len(all_tokens)} | \033[1;32m✓{successful}\033[0m | \033[1;31m✗{failed}\033[0m | \033[1;33m★{verified_count}\033[0m | ETA: {eta_str} ", end='', flush=True)
        monitor_task = asyncio.create_task(monitor_progress())
        batch_tasks = []
        for batch in batches:
            task = process_batch_tokens(batch, proxies, output_dir, semaphore, config['cookies_only_mode'], results, user_data_list)
            batch_tasks.append(task)
        await asyncio.gather(*batch_tasks)
        await monitor_task
        successful = sum(1 for s, v in results if s)
        failed = sum(1 for s, v in results if not s)
        verified_count = sum(1 for s, v in results if s and v)
        print()
        print()
        end_time = datetime.now()
        elapsed_time = end_time - start_time
        elapsed_seconds = elapsed_time.total_seconds()
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print("\033[1;37m SUMMARY\033[0m")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print(f"\033[1;37mTotal accounts:\033[0m {len(all_tokens)}")
        print(f"\033[1;32m✓ Successful:\033[0m {successful}")
        print(f"\033[1;31m✗ Failed:\033[0m {failed}")
        print(f"\033[1;33m★ Verified:\033[0m {verified_count}")
        print(f"\033[1;35m⏱ Time elapsed:\033[0m {elapsed_seconds:.2f}s")
        print(f"\033[1;34m📁 Results saved to:\033[0m {output_dir.absolute()}/")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        # Write a plain-text summary next to the per-account result files.
        summary_file = output_dir / f"summary_{datetime.now().strftime('%Y%m%d_%H%M%S')}.txt"
        summary_lines = [
            "=" * 80,
            "X Checker, contact @CrystalStud for support",
            "=" * 80,
            f"Date: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
            "",
            "Results:",
            f" Total accounts checked: {len(all_tokens)}",
            f" Valid accounts: {successful}",
            f" Invalid accounts: {failed}",
            f" Verified accounts: {verified_count}",
            "",
            "Performance:",
            f" Time elapsed: {elapsed_seconds:.2f}s",
            f" Average per account: {elapsed_seconds/len(all_tokens):.2f}s",
            "",
            "Configuration:",
            f" Proxies loaded: {len(proxies) if proxies else 0}",
            f" Token files: {len(token_files)}",
            f" Total tokens: {len(all_tokens)}",
            "",
            "=" * 80,
        ]
        summary_file.write_text('\n'.join(summary_lines), encoding='utf-8')
        print_success(f"Summary saved to: {summary_file.name}")
        try:
            # Best-effort cleanup of the temporary session file.
            session_file = Path("temp_session.tw_session")
            if session_file.exists():
                session_file.unlink()
        except:
            pass
    except FileNotFoundError as e:
        print_error(f"Error: {e}")
    except Exception as e:
        print_error(f"Unexpected error: {e}")
async def filter_accounts():
    """Re-filter previously checked result files by follower/post/like and
    verified criteria, and copy the matching accounts into the filtered
    output directory in the configured save format (auth_token / netscape /
    json)."""
    print_banner()
    print("\n\033[1;32m┌─────────────────────────────────────┐")
    print("│ FILTER ACCOUNTS │")
    print("└─────────────────────────────────────┘\033[0m\n")
    print_info("Enter filter criteria (leave empty to skip):")
    print()
    # Prompt with the last-used thresholds as defaults.
    min_followers = get_input("Minimum followers", str(config.get('filter_min_followers', 0)))
    max_followers = get_input("Maximum followers (0 = unlimited)", str(config.get('filter_max_followers', 0)))
    min_posts = get_input("Minimum posts", str(config.get('filter_min_posts', 0)))
    min_likes = get_input("Minimum likes", str(config.get('filter_min_likes', 0)))
    verified_only = get_input("Verified only? (y/n)", "n").lower() == "y"
    try:
        min_followers = int(min_followers)
        max_followers = int(max_followers)
        min_posts = int(min_posts)
        min_likes = int(min_likes)
    except:
        print_error("Invalid numeric input")
        return
    # Persist the chosen criteria for next time.
    config['filter_min_followers'] = min_followers
    config['filter_max_followers'] = max_followers
    config['filter_min_posts'] = min_posts
    config['filter_min_likes'] = min_likes
    config['filter_verified_only'] = verified_only
    save_config(config)
    results_dir = Path(config['output_dir'])
    if not results_dir.exists():
        print_error(f"Results directory not found: {results_dir}")
        return
    result_files = list(results_dir.glob("*.txt"))
    result_files = [f for f in result_files if not f.name.startswith("summary_")]
    if not result_files:
        print_error("No account files found in results directory")
        return
    print()
    print_info(f"Found {len(result_files)} account files")
    print()
    filtered_dir = Path(config.get('filtered_output_dir', 'filtered'))
    filtered_dir.mkdir(exist_ok=True)
    from format_converter import parse_cookie_string, cookies_dict_to_netscape, cookies_dict_to_json, extract_auth_token_from_cookies
    filtered_count = 0
    save_format = config.get('save_format', 'auth_token')
    for result_file in result_files:
        # Any parse error skips the file silently (per-file best effort).
        try:
            content = result_file.read_text(encoding='utf-8')
            lines = content.split('\n')
            followers = 0
            posts = 0
            likes = 0
            verified = False
            username = ""
            # Pull the stats out of the human-readable header of the result file.
            for line in lines:
                if line.startswith("Followers:"):
                    parts = line.split('|')
                    followers_part = parts[0].replace("Followers:", "").strip().replace(",", "")
                    try:
                        followers = int(followers_part)
                    except:
                        pass
                elif line.startswith("Tweets:"):
                    parts = line.split('|')
                    posts_part = parts[0].replace("Tweets:", "").strip().replace(",", "")
                    # NOTE(review): parts[1] raises IndexError if the line has
                    # no '|'; the outer except then skips the whole file.
                    likes_part = parts[1].replace("Likes:", "").strip().replace(",", "")
                    try:
                        posts = int(posts_part)
                        likes = int(likes_part)
                    except:
                        pass
                elif line.startswith("Verified:"):
                    verified = "Yes" in line
                elif line.startswith("Username:"):
                    username = line.split("@")[-1].strip()
            if followers >= min_followers and posts >= min_posts and likes >= min_likes:
                if max_followers == 0 or followers <= max_followers:
                    if not verified_only or verified:
                        # The cookie payload follows the LAST separator line.
                        separator_index = -1
                        for i, line in enumerate(lines):
                            if line.startswith("=" * 80):
                                separator_index = i
                        if separator_index >= 0 and separator_index + 1 < len(lines):
                            content_data = '\n'.join(lines[separator_index + 1:]).strip()
                            if save_format == 'auth_token':
                                # Netscape payload: tab-separated, name in
                                # column 5 and value in column 6.
                                if content_data.startswith('#') or '\t' in content_data:
                                    cookies_dict = {}
                                    for line in content_data.split('\n'):
                                        if line.startswith('#') or not line.strip():
                                            continue
                                        parts = line.split('\t')
                                        if len(parts) >= 7:
                                            cookies_dict[parts[5]] = parts[6]
                                    auth_token = cookies_dict.get('auth_token', content_data)
                                elif content_data.startswith('[') or content_data.startswith('{'):
                                    import json
                                    try:
                                        data = json.loads(content_data)
                                        if isinstance(data, list):
                                            cookies_dict = {}
                                            for cookie in data:
                                                if 'name' in cookie and 'value' in cookie:
                                                    cookies_dict[cookie['name']] = cookie['value']
                                            auth_token = cookies_dict.get('auth_token', content_data)
                                        else:
                                            auth_token = content_data
                                    except:
                                        auth_token = content_data
                                else:
                                    auth_token = content_data
                                output_file = filtered_dir / result_file.name
                                output_file.write_text(auth_token, encoding='utf-8')
                            elif save_format == 'netscape':
                                if content_data.startswith('[') or content_data.startswith('{'):
                                    import json
                                    try:
                                        data = json.loads(content_data)
                                        if isinstance(data, list):
                                            cookies_dict = {}
                                            for cookie in data:
                                                if 'name' in cookie and 'value' in cookie:
                                                    cookies_dict[cookie['name']] = cookie['value']
                                        else:
                                            cookies_dict = data
                                        netscape_content = cookies_dict_to_netscape(cookies_dict)
                                        output_file = filtered_dir / result_file.name
                                        output_file.write_text(netscape_content, encoding='utf-8')
                                    except:
                                        # Unparseable JSON: copy the payload verbatim.
                                        output_file = filtered_dir / result_file.name
                                        output_file.write_text(content_data, encoding='utf-8')
                                else:
                                    # Already Netscape (or raw): copy verbatim.
                                    output_file = filtered_dir / result_file.name
                                    output_file.write_text(content_data, encoding='utf-8')
                            elif save_format == 'json':
                                if content_data.startswith('#') or '\t' in content_data:
                                    cookies_dict = {}
                                    for line in content_data.split('\n'):
                                        if line.startswith('#') or not line.strip():
                                            continue
                                        parts = line.split('\t')
                                        if len(parts) >= 7:
                                            cookies_dict[parts[5]] = parts[6]
                                    json_content = cookies_dict_to_json(cookies_dict)
                                    output_file = filtered_dir / result_file.name
                                    output_file.write_text(json_content, encoding='utf-8')
                                else:
                                    output_file = filtered_dir / result_file.name
                                    output_file.write_text(content_data, encoding='utf-8')
                            # Counted once per account whose payload was handled.
                            filtered_count += 1
        except Exception as e:
            continue
    print()
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print("\033[1;37m FILTER RESULTS\033[0m")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print(f"\033[1;37mTotal accounts scanned:\033[0m {len(result_files)}")
    print(f"\033[1;32m✓ Accounts matching criteria:\033[0m {filtered_count}")
    print(f"\033[1;34m📁 Filtered results saved to:\033[0m {filtered_dir.absolute()}/")
    print(f"\033[1;35m💾 Save format:\033[0m {save_format}")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
async def extract_tokens():
    """Harvest the ``auth_token`` value out of every result file (Netscape,
    JSON-list, JSON-object, cookie-string or bare-token payload) and write
    the tokens, one per line, to a single output file."""
    print_banner()
    print("\n\033[1;32m┌─────────────────────────────────────┐")
    print("│ EXTRACT AUTH TOKENS │")
    print("└─────────────────────────────────────┘\033[0m\n")
    results_dir = Path(config['output_dir'])
    if not results_dir.exists():
        print_error(f"Results directory not found: {results_dir}")
        return
    result_files = list(results_dir.glob("*.txt"))
    result_files = [f for f in result_files if not f.name.startswith("summary_")]
    if not result_files:
        print_error("No account files found in results directory")
        return
    print_info(f"Found {len(result_files)} account files")
    print()
    output_file = get_input("Enter output filename", "auth_tokens.txt")
    output_path = Path(output_file)
    from format_converter import parse_cookie_string, extract_auth_token_from_cookies
    import json
    tokens = []
    extracted_count = 0
    for result_file in result_files:
        # Any parse error skips the file silently (per-file best effort).
        try:
            content = result_file.read_text(encoding='utf-8')
            lines = content.split('\n')
            # The cookie payload follows the LAST 80-char separator line.
            separator_index = -1
            for i, line in enumerate(lines):
                if line.startswith("=" * 80):
                    separator_index = i
            if separator_index >= 0 and separator_index + 1 < len(lines):
                content_data = '\n'.join(lines[separator_index + 1:]).strip()
                auth_token = None
                if content_data.startswith('#') or '\t' in content_data:
                    # Netscape payload: name in column 5, value in column 6.
                    cookies_dict = {}
                    for line in content_data.split('\n'):
                        if line.startswith('#') or not line.strip():
                            continue
                        parts = line.split('\t')
                        if len(parts) >= 7:
                            cookies_dict[parts[5]] = parts[6]
                    auth_token = cookies_dict.get('auth_token')
                elif content_data.startswith('[') or content_data.startswith('{'):
                    try:
                        data = json.loads(content_data)
                        if isinstance(data, list):
                            cookies_dict = {}
                            for cookie in data:
                                if 'name' in cookie and 'value' in cookie:
                                    cookies_dict[cookie['name']] = cookie['value']
                            auth_token = cookies_dict.get('auth_token')
                        else:
                            auth_token = data.get('auth_token')
                    except:
                        pass
                else:
                    # Heuristic: a "key=value; ..." cookie string or a bare
                    # token (anything longer than 30 chars).
                    if 'auth_token=' in content_data or len(content_data) > 30:
                        if '; ' in content_data:
                            cookies_dict = parse_cookie_string(content_data)
                            auth_token = cookies_dict.get('auth_token')
                        else:
                            auth_token = content_data.strip()
                if auth_token:
                    tokens.append(auth_token)
                    extracted_count += 1
        except Exception as e:
            continue
    if tokens:
        output_path.write_text('\n'.join(tokens), encoding='utf-8')
        print()
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print("\033[1;37m EXTRACTION RESULTS\033[0m")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
        print(f"\033[1;37mTotal files scanned:\033[0m {len(result_files)}")
        print(f"\033[1;32m✓ Tokens extracted:\033[0m {extracted_count}")
        print(f"\033[1;34m📁 Saved to:\033[0m {output_path.absolute()}")
        print("\033[1;36m" + "=" * 70 + "\033[0m")
    else:
        print_error("No auth tokens found in result files")
async def convert_cookies():
    """Interactively convert cookie files between Netscape and JSON formats,
    writing the converted copies into a separate output directory."""
    print_banner()
    print("\n\033[1;32m┌─────────────────────────────────────┐")
    print("│ CONVERT COOKIE FORMAT │")
    print("└─────────────────────────────────────┘\033[0m\n")
    print("\033[1;33mSelect conversion:\033[0m")
    print(" 1. Netscape → JSON")
    print(" 2. JSON → Netscape")
    print()
    conversion_choice = get_input("Enter choice (1/2)")
    if conversion_choice not in ["1", "2"]:
        print_error("Invalid choice")
        return
    input_path = get_input("Enter input file/directory path")
    input_location = Path(input_path).expanduser().resolve()
    if not input_location.exists():
        # Fall back to interpreting the path relative to the CWD.
        input_location = Path.cwd() / input_path
        if not input_location.exists():
            print_error(f"Path not found: {input_path}")
            return
    output_dir = get_input("Enter output directory", "converted")
    output_path = Path(output_dir)
    output_path.mkdir(exist_ok=True)
    from cookie_parser import netscape_to_dict, json_to_dict, clean_cookie_content
    from format_converter import cookies_dict_to_netscape, cookies_dict_to_json
    import json
    # A single file converts just itself; a directory converts every
    # top-level .txt and .json file in it (non-recursive).
    files_to_convert = []
    if input_location.is_file():
        files_to_convert = [input_location]
    else:
        files_to_convert = list(input_location.glob('*.txt')) + list(input_location.glob('*.json'))
    if not files_to_convert:
        print_error("No files found to convert")
        return
    print()
    print_info(f"Found {len(files_to_convert)} file(s) to convert")
    print()
    converted_count = 0
    for file_path in files_to_convert:
        # Per-file best effort: any failure skips the file silently.
        try:
            content = file_path.read_text(encoding='utf-8')
            cleaned_content = clean_cookie_content(content)
            cookies_dict = None
            if conversion_choice == "1":
                # Netscape → JSON: output keeps the stem, swaps to .json.
                try:
                    cookies_dict = netscape_to_dict(cleaned_content)
                    if cookies_dict:
                        json_content = cookies_dict_to_json(cookies_dict)
                        output_file = output_path / f"{file_path.stem}.json"
                        output_file.write_text(json_content, encoding='utf-8')
                        converted_count += 1
                except Exception as e:
                    continue
            else:
                # JSON → Netscape: output keeps the stem, swaps to .txt.
                try:
                    cookies_dict = json_to_dict(cleaned_content)
                    if cookies_dict:
                        netscape_content = cookies_dict_to_netscape(cookies_dict)
                        output_file = output_path / f"{file_path.stem}.txt"
                        output_file.write_text(netscape_content, encoding='utf-8')
                        converted_count += 1
                except Exception as e:
                    continue
        except Exception as e:
            continue
    print()
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print("\033[1;37m CONVERSION RESULTS\033[0m")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print(f"\033[1;37mTotal files scanned:\033[0m {len(files_to_convert)}")
    print(f"\033[1;32m✓ Files converted:\033[0m {converted_count}")
    print(f"\033[1;34m📁 Saved to:\033[0m {output_path.absolute()}/")
    if conversion_choice == "1":
        print(f"\033[1;35m📝 Format:\033[0m Netscape → JSON")
    else:
        print(f"\033[1;35m📝 Format:\033[0m JSON → Netscape")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
async def extract_xcom_cookies_from_logs():
    """Walk a stealer-log style directory tree (<user folder>/Cookies/*) and
    collect Netscape-format x.com / twitter.com cookies, rewriting
    twitter.com domains to x.com and writing one output file per user
    folder."""
    print_banner()
    print("\n\033[1;32m┌─────────────────────────────────────┐")
    print("│ EXTRACT X.COM COOKIES FROM LOGS │")
    print("└─────────────────────────────────────┘\033[0m\n")
    logs_path = get_input("Enter logs directory path", "Logs")
    logs_dir = Path(logs_path).expanduser().resolve()
    if not logs_dir.exists():
        # Fall back to interpreting the path relative to the CWD.
        logs_dir = Path.cwd() / logs_path
        if not logs_dir.exists():
            print_error(f"Logs directory not found: {logs_path}")
            return
    if not logs_dir.is_dir():
        print_error(f"Path is not a directory: {logs_path}")
        return
    output_dir = get_input("Enter output directory", "extracted_xcom_cookies")
    output_path = Path(output_dir)
    output_path.mkdir(exist_ok=True)
    print()
    print_info(f"Scanning logs directory: {logs_dir}")
    print()
    extracted_count = 0
    scanned_files = 0
    for user_folder in logs_dir.iterdir():
        if not user_folder.is_dir():
            continue
        cookies_folder = user_folder / "Cookies"
        if not cookies_folder.exists() or not cookies_folder.is_dir():
            continue
        for cookie_file in cookies_folder.iterdir():
            if not cookie_file.is_file():
                continue
            # Only plain-text cookie dumps (.txt/.log or no extension).
            if cookie_file.suffix not in ['.txt', '.log', '']:
                continue
            try:
                scanned_files += 1
                content = cookie_file.read_text(encoding='utf-8', errors='ignore')
                lines = content.split('\n')
                all_cookies = []
                for line in lines:
                    line = line.strip()
                    if not line or line.startswith('#'):
                        continue
                    # Netscape row: 7 tab-separated fields, domain first.
                    parts = line.split('\t')
                    if len(parts) >= 7:
                        domain = parts[0]
                        if '.x.com' in domain or 'x.com' == domain or '.twitter.com' in domain or 'twitter.com' == domain:
                            # Normalize legacy twitter.com domains to x.com.
                            parts[0] = parts[0].replace('.twitter.com', '.x.com')
                            if parts[0] == 'twitter.com':
                                parts[0] = 'x.com'
                            converted_line = '\t'.join(parts)
                            all_cookies.append(converted_line)
                if all_cookies:
                    # Output filename is derived from the user folder,
                    # sanitized to alphanumerics plus '-'/'_'.
                    username = user_folder.name
                    safe_username = "".join(c if c.isalnum() or c in ('-', '_') else '_' for c in username)
                    output_file = output_path / f"{safe_username}.txt"
                    cookie_content = '\n'.join(all_cookies)
                    output_file.write_text(cookie_content, encoding='utf-8')
                    extracted_count += 1
                    # One output per user folder: stop after the first cookie
                    # file that yields matches (also avoids overwriting).
                    break
            except Exception as e:
                continue
    print()
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print("\033[1;37m EXTRACTION RESULTS\033[0m")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
    print(f"\033[1;37mFiles scanned:\033[0m {scanned_files}")
    print(f"\033[1;32m✓ Accounts with X.com cookies:\033[0m {extracted_count}")
    print(f"\033[1;34m📁 Saved to:\033[0m {output_path.absolute()}/")
    print("\033[1;36m" + "=" * 70 + "\033[0m")
async def main():
    """Interactive main-menu loop; dispatches to the feature coroutines and
    the settings editor until the user chooses Exit."""
    while True:
        print_banner()
        print_menu()
        choice = get_input("Select option")
        if choice == "1":
            # --- Check accounts (cookie or token mode, per config) ---
            print_banner()
            print("\n\033[1;32m┌─────────────────────────────────────┐")
            print("│ CHECK ACCOUNTS │")
            print("└─────────────────────────────────────┘\033[0m\n")
            print(f"\033[1;33mCurrent Auth Mode:\033[0m \033[1;36m{config['auth_mode']}\033[0m\n")
            if config['auth_mode'] == 'cookies':
                cookies_path = get_input("Enter cookies directory/file path", config['cookies_dir'])
                use_proxy = get_input("Use proxies? (y/n)", "y" if config['use_proxy'] else "n").lower()
                proxy_file = None
                if use_proxy == "y":
                    proxy_file = get_input("Enter proxy file path", "proxies.txt")
                print()
                await run_checker(cookies_path, proxy_file)
            else:
                tokens_path = get_input("Enter auth tokens directory/file path", config.get('tokens_dir', 'tokens'))
                use_proxy = get_input("Use proxies? (y/n)", "y" if config['use_proxy'] else "n").lower()
                proxy_file = None
                if use_proxy == "y":
                    proxy_file = get_input("Enter proxy file path", "proxies.txt")
                print()
                await run_checker_tokens(tokens_path, proxy_file)
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "2":
            await filter_accounts()
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "3":
            await extract_tokens()
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "4":
            await convert_cookies()
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "5":
            await extract_xcom_cookies_from_logs()
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "6":
            # --- Settings: edit and persist the shared config dict ---
            print_banner()
            print("\n\033[1;32m┌─────────────────────────────────────┐")
            print("│ SETTINGS │")
            print("└─────────────────────────────────────┘\033[0m\n")
            print("\033[1;33mCurrent Settings:\033[0m")
            print(f" 1. Cookies directory: \033[1;36m{config['cookies_dir']}\033[0m")
            print(f" 2. Output directory: \033[1;36m{config['output_dir']}\033[0m")
            print(f" 3. Cookies only mode: \033[1;36m{'Enabled' if config['cookies_only_mode'] else 'Disabled'}\033[0m")
            print(f" 4. Threads: \033[1;36m{config['threads']}\033[0m")
            print(f" 5. Use proxy by default: \033[1;36m{'Yes' if config['use_proxy'] else 'No'}\033[0m")
            print(f" 6. Auth mode: \033[1;36m{config['auth_mode']}\033[0m")
            print(f" 7. Save format: \033[1;36m{config.get('save_format', 'auth_token')}\033[0m")
            print(f"\n 0. Back to main menu")
            print()
            setting_choice = get_input("Select setting to change")
            # "0" (or anything unrecognized) simply falls through back to
            # the main loop.
            if setting_choice == "1":
                new_dir = get_input("Enter cookies directory", config['cookies_dir'])
                config['cookies_dir'] = new_dir
                save_config(config)
                print_success(f"Cookies directory set to: {new_dir}")
                await asyncio.sleep(1)
            elif setting_choice == "2":
                new_dir = get_input("Enter output directory", config['output_dir'])
                config['output_dir'] = new_dir
                save_config(config)
                print_success(f"Output directory set to: {new_dir}")
                await asyncio.sleep(1)
            elif setting_choice == "3":
                toggle = get_input("Enable cookies only mode? (y/n)", "n").lower()
                config['cookies_only_mode'] = toggle == "y"
                save_config(config)
                print_success(f"Cookies only mode: {'Enabled' if config['cookies_only_mode'] else 'Disabled'}")
                await asyncio.sleep(1)
            elif setting_choice == "4":
                new_threads = get_input("Enter number of threads (1-10)", str(config['threads']))
                try:
                    num = int(new_threads)
                    if 1 <= num <= 10:
                        config['threads'] = num
                        save_config(config)
                        print_success(f"Threads set to: {num}")
                    else:
                        print_error("Value must be between 1 and 10")
                except:
                    # Non-numeric input is reported, not fatal.
                    print_error("Invalid number")
                await asyncio.sleep(1)
            elif setting_choice == "5":
                toggle = get_input("Use proxy by default? (y/n)", "y" if config['use_proxy'] else "n").lower()
                config['use_proxy'] = toggle == "y"
                save_config(config)
                print_success(f"Use proxy by default: {'Yes' if config['use_proxy'] else 'No'}")
                await asyncio.sleep(1)
            elif setting_choice == "6":
                print("\n\033[1;33mSelect authentication mode:\033[0m")
                print(" 1. Cookies (Netscape/JSON format)")
                print(" 2. Auth Tokens (one per line in .txt)")
                mode_choice = get_input("Enter choice (1/2)")
                if mode_choice == "1":
                    config['auth_mode'] = 'cookies'
                    save_config(config)
                    print_success("Auth mode set to: cookies")
                elif mode_choice == "2":
                    config['auth_mode'] = 'tokens'
                    save_config(config)
                    print_success("Auth mode set to: tokens")
                else:
                    print_error("Invalid choice")
                await asyncio.sleep(1)
            elif setting_choice == "7":
                print("\n\033[1;33mSelect save format:\033[0m")
                print(" 1. Auth Token only")
                print(" 2. Netscape format")
                print(" 3. JSON format")
                format_choice = get_input("Enter choice (1/2/3)")
                if format_choice == "1":
                    config['save_format'] = 'auth_token'
                    save_config(config)
                    print_success("Save format set to: auth_token")
                elif format_choice == "2":
                    config['save_format'] = 'netscape'
                    save_config(config)
                    print_success("Save format set to: netscape")
                elif format_choice == "3":
                    config['save_format'] = 'json'
                    save_config(config)
                    print_success("Save format set to: json")
                else:
                    print_error("Invalid choice")
                await asyncio.sleep(1)
        elif choice == "7":
            # --- About screen ---
            print_banner()
            print("\n\033[1;32m┌─────────────────────────────────────┐")
            print("│ ABOUT │")
            print("└─────────────────────────────────────┘\033[0m\n")
            print("\033[1;36mX cookies Checker\033[0m")
            print("\033[1;37mDeveloped by: @CrystalStud\033[0m")
            print("\n\033[1;33mFeatures:\033[0m")
            print(" • Bulk cookie/token validation")
            print(" • Extract X.com cookies from logs")
            print(" • Account filtering (followers, posts, verified)")
            print(" • Auth token extraction")
            print(" • Cookie format converter (Netscape ↔ JSON)")
            print(" • Multiple save formats (auth_token, netscape, json)")
            print(" • Proxy support (SOCKS5/HTTP)")
            print(" • Multi-threaded processing")
            print(" • Detailed reporting")
            print("\n\033[1;35mContact: @CrystalStud for support/buy\033[0m")
            input("\n\n\033[1;37mPress Enter to continue...\033[0m")
        elif choice == "0":
            print_banner()
            print("\n\033[1;35mThank you for using X Checker!\033[0m")
            print("\033[1;36mDM @CrystalStud for support\033[0m\n")
            sys.exit(0)
        else:
            print_error("Invalid option. Please try again.")
            await asyncio.sleep(1)
# Entry point: run the interactive menu loop on the asyncio event loop.
if __name__ == "__main__":
    asyncio.run(main())