twitter-checker/file_utils.py
2026-01-21 04:25:00 +02:00

262 lines
9.7 KiB
Python

from pathlib import Path
from datetime import datetime
def save_in_format(user, content: str, content_type: str, output_dir: Path, save_format: str):
    """Write *content* for *user* into *output_dir*, converted to *save_format*.

    Only cookie payloads (``content_type == 'cookies'``) are converted;
    token payloads and unrecognized formats are written verbatim.

    Args:
        user: checked account exposing ``username``, ``verified``,
            ``followers_count`` and ``statuses_count`` attributes.
        content: raw cookie string or auth token to save.
        content_type: ``'cookies'`` or ``'token'``.
        output_dir: destination directory (created if missing).
        save_format: ``'auth_token'`` | ``'netscape'`` | ``'json'``; any
            other value writes the content unchanged (the previous code
            silently wrote nothing in that case).

    Returns:
        Path of the written file.
    """
    output_dir.mkdir(parents=True, exist_ok=True)
    verified_status = "verified" if user.verified else "unverified"
    filename = f"{verified_status} - {user.followers_count} followers - {user.statuses_count} posts - @{user.username}.txt"
    info_file = output_dir / filename
    payload = content
    if content_type == 'cookies' and save_format in ('auth_token', 'netscape', 'json'):
        # Project converters are imported lazily so non-cookie payloads do not
        # require format_converter to be importable.
        from format_converter import (
            cookies_dict_to_json,
            cookies_dict_to_netscape,
            extract_auth_token_from_cookies,
            parse_cookie_string,
        )
        cookies_dict = parse_cookie_string(content)
        if save_format == 'auth_token':
            token = extract_auth_token_from_cookies(cookies_dict)
            # Fall back to the raw cookie string when no token is found
            # (presumably the extractor can return None — TODO confirm);
            # previously this wrote None and crashed in write_text.
            payload = token or content
        elif save_format == 'netscape':
            payload = cookies_dict_to_netscape(cookies_dict)
        else:
            payload = cookies_dict_to_json(cookies_dict)
    info_file.write_text(payload, encoding='utf-8')
    return info_file
def scan_cookies_directory(dir_path: str):
    """Resolve *dir_path* and collect candidate cookie files.

    A path naming a single file is returned as a one-element list; a
    directory is scanned (non-recursively) for ``*.json`` and ``*.txt``
    files, returned sorted by path.  When the path does not exist as
    given, it is retried relative to the current working directory before
    raising ``FileNotFoundError``.
    """
    candidate = Path(dir_path).expanduser().resolve()
    if not candidate.exists():
        candidate = Path.cwd() / dir_path
        if not candidate.exists():
            raise FileNotFoundError(f"Path not found: {dir_path}")
    if candidate.is_file():
        return [candidate]
    matches = [f for pattern in ('*.json', '*.txt') for f in candidate.glob(pattern)]
    return sorted(matches)
def _convert_cookie_content(cookie_file: Path, save_format: str, fallback_content: str) -> str:
    """Return the cookie payload converted to *save_format*.

    Falls back to *fallback_content* when the format is unrecognized, the
    cookie file cannot be re-read, or (for 'auth_token') no token is present.
    """
    if save_format not in ('auth_token', 'netscape', 'json'):
        return fallback_content
    # Project helpers imported lazily: only needed on conversion paths.
    from format_converter import (
        cookies_dict_to_json,
        cookies_dict_to_netscape,
        extract_auth_token_from_cookies,
        parse_cookie_string,
    )
    from cookie_parser import read_cookies_file
    cookies_string = read_cookies_file(str(cookie_file))
    if not cookies_string:
        return fallback_content
    cookies_dict = parse_cookie_string(cookies_string)
    if save_format == 'auth_token':
        token = extract_auth_token_from_cookies(cookies_dict)
        return token if token else fallback_content
    if save_format == 'netscape':
        return cookies_dict_to_netscape(cookies_dict)
    return cookies_dict_to_json(cookies_dict)


def _format_verification(user) -> str:
    """Build the "Verified: ..." summary line for the info file.

    Type/label details are only discoverable when *user* is a raw dict
    payload; attribute-style user objects just yield Yes/No.
    NOTE(review): mirrors the original code in assuming a dict payload also
    exposes ``.verified`` as an attribute — confirm against callers.
    """
    verified_type = None
    verified_label = None
    if isinstance(user, dict):
        verified_type = user.get('verified_type') or user.get('ext_verified_type')
        verified_label = user.get('verified_label')
        legacy = user.get('legacy')
        if isinstance(legacy, dict) and not verified_type:
            verified_type = legacy.get('verified_type')
        if user.get('ext_is_blue_verified'):
            verified_type = verified_type or 'Blue'
    if not user.verified:
        return "Verified: No"
    parts = ["Verified: Yes"]
    if verified_type:
        parts.append(f"Type: {verified_type}")
    if verified_label:
        parts.append(f"Label: {verified_label}")
    return " | ".join(parts)


def _format_created_at(created_at) -> str:
    """Render an account-creation timestamp as 'DD.MM.YYYY HH:MM'.

    Accepts a datetime or a parseable string; falls back to str() on any
    failure (e.g. python-dateutil missing or an unexpected format).
    """
    try:
        if isinstance(created_at, str):
            from dateutil import parser
            created_at = parser.parse(created_at)
        return created_at.strftime("%d.%m.%Y %H:%M")
    except Exception:  # was a bare except, which also swallowed KeyboardInterrupt
        return str(created_at)


def save_account_info(user, cookie_file: Path, output_dir: Path, cleaned_cookies: str = None, cookies_only_mode: bool = False, save_format: str = 'auth_token', user_data_list=None):
    """Write a per-account info file (stats header + cookie payload).

    Args:
        user: checked account (attribute-style object; may be a raw dict).
        cookie_file: cookie file this account was checked from.
        output_dir: destination directory (created if missing).
        cleaned_cookies: pre-cleaned cookie string; when absent the cookie
            file is read directly.
        cookies_only_mode: when True, delegate to ``save_in_format`` and
            write only the (converted) cookie payload, no stats header.
        save_format: ``'auth_token'`` | ``'netscape'`` | ``'json'``; any
            other value keeps the raw payload.
        user_data_list: optional accumulator list; a record describing this
            account is appended when provided.

    Returns:
        Path of the written file.
    """
    if user_data_list is not None:
        user_data_list.append({
            'user': user,
            'cookie_file': cookie_file,
            'cleaned_cookies': cleaned_cookies,
            'content_type': 'cookies'
        })
    if cookies_only_mode:
        return save_in_format(user, cleaned_cookies or cookie_file.read_text(encoding='utf-8'), 'cookies', output_dir, save_format)
    output_dir.mkdir(parents=True, exist_ok=True)
    username = user.username
    verified_status = "verified" if user.verified else "unverified"
    filename = f"{verified_status} - {user.followers_count} followers - {user.statuses_count} posts - @{username}.txt"
    info_file = output_dir / filename
    # Payload used whenever format conversion is impossible or unnecessary.
    fallback_content = cleaned_cookies or cookie_file.read_text(encoding='utf-8')
    cookie_content = _convert_cookie_content(cookie_file, save_format, fallback_content)
    # Normalize legacy twitter.com domains to x.com; this single replace also
    # covers '.twitter.com' (the old second chained replace was redundant).
    cookie_content = cookie_content.replace('twitter.com', 'x.com')
    info_lines = [
        "=" * 80,
        f"Checked: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        "",
        f"ID: {user.id} | Name: {user.name} | Username: @{username}",
        f"Followers: {user.followers_count:,} | Following: {user.friends_count:,}",
        f"Tweets: {user.statuses_count:,} | Likes: {user.favourites_count:,}",
        _format_verification(user),
        f"Created: {_format_created_at(user.created_at)}",
        f"Profile: https://x.com/{username}",
        "",
        f"Cookie file: {cookie_file.absolute()}",
        "",
        "=" * 80,
        cookie_content,
    ]
    info_file.write_text('\n'.join(info_lines), encoding='utf-8')
    return info_file
def save_account_info_token(user, auth_token: str, token_source: str, output_dir: Path, cookies_only_mode: bool = False, save_format: str = 'auth_token', user_data_list=None):
    """Write a per-account info file (stats header + auth token).

    Args:
        user: checked account (attribute-style object; may be a raw dict).
        auth_token: the auth_token value to record.
        token_source: human-readable origin of the token (shown in the file).
        output_dir: destination directory (created if missing).
        cookies_only_mode: when True, delegate to ``save_in_format`` and
            write only the token, no stats header.
        save_format: forwarded to ``save_in_format`` in cookies-only mode.
        user_data_list: optional accumulator list; a record describing this
            account is appended when provided.

    Returns:
        Path of the written file.
    """
    if user_data_list is not None:
        user_data_list.append({
            'user': user,
            'auth_token': auth_token,
            'token_source': token_source,
            'content_type': 'token'
        })
    if cookies_only_mode:
        # Single early return; the original had a second, unreachable
        # cookies_only_mode branch further down (dead code, removed).
        return save_in_format(user, auth_token, 'token', output_dir, save_format)
    output_dir.mkdir(parents=True, exist_ok=True)
    username = user.username
    verified_status = "verified" if user.verified else "unverified"
    filename = f"{verified_status} - {user.followers_count} followers - {user.statuses_count} posts - @{username}.txt"
    info_file = output_dir / filename
    # Verified type/label are only discoverable on raw dict payloads.
    verified_type = None
    verified_label = None
    if isinstance(user, dict):
        verified_type = user.get('verified_type') or user.get('ext_verified_type')
        verified_label = user.get('verified_label')
        legacy = user.get('legacy')
        if isinstance(legacy, dict) and not verified_type:
            verified_type = legacy.get('verified_type')
        if user.get('ext_is_blue_verified'):
            verified_type = verified_type or 'Blue'
    verification_parts = []
    if user.verified:
        verification_parts.append("Verified: Yes")
        if verified_type:
            verification_parts.append(f"Type: {verified_type}")
        if verified_label:
            verification_parts.append(f"Label: {verified_label}")
    else:
        verification_parts.append("Verified: No")
    verification_line = " | ".join(verification_parts)
    # Render creation date; accept a datetime or parseable string, fall back
    # to str() on failure.
    try:
        if isinstance(user.created_at, str):
            from dateutil import parser
            created_str = parser.parse(user.created_at).strftime("%d.%m.%Y %H:%M")
        else:
            created_str = user.created_at.strftime("%d.%m.%Y %H:%M")
    except Exception:  # was a bare except, which also swallowed KeyboardInterrupt
        created_str = str(user.created_at)
    info_lines = [
        "=" * 80,
        f"Checked: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}",
        "",
        f"ID: {user.id} | Name: {user.name} | Username: @{username}",
        f"Followers: {user.followers_count:,} | Following: {user.friends_count:,}",
        f"Tweets: {user.statuses_count:,} | Likes: {user.favourites_count:,}",
        verification_line,
        f"Created: {created_str}",
        f"Profile: https://x.com/{username}",
        "",
        f"Token source: {token_source}",
        "",
        "=" * 80,
        auth_token,
    ]
    info_file.write_text('\n'.join(info_lines), encoding='utf-8')
    return info_file