Skip to content

Commit

Permalink
feat: 代理IP功能 Done
Browse files Browse the repository at this point in the history
  • Loading branch information
NanmiCoder committed Dec 7, 2023
1 parent c530bd4 commit 1cec23f
Show file tree
Hide file tree
Showing 9 changed files with 103 additions and 92 deletions.
2 changes: 1 addition & 1 deletion base/base_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

class AbstractCrawler(ABC):
@abstractmethod
def init_config(self, platform: str, login_type: str, account_pool: AccountPool, crawler_type: str):
def init_config(self, platform: str, login_type: str, crawler_type: str):
pass

@abstractmethod
Expand Down
11 changes: 10 additions & 1 deletion config/base_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,10 @@
CRAWLER_TYPE = "search"

# 是否开启 IP 代理
ENABLE_IP_PROXY = False
ENABLE_IP_PROXY = True

# 代理IP池数量
IP_PROXY_POOL_COUNT = 2

# 重试时间
RETRY_INTERVAL = 60 * 30 # 30 minutes
Expand Down Expand Up @@ -49,4 +52,10 @@
"7280854932641664319",
"7202432992642387233"
# ........................
]


# 指定快手平台需要爬取的ID列表
KS_SPECIFIED_ID_LIST = [

]
4 changes: 0 additions & 4 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,9 +38,6 @@ async def main():
parser.add_argument('--type', type=str, help='crawler type (search | detail)',
choices=["search", "detail"], default=config.CRAWLER_TYPE)

# init account pool
account_pool = proxy_account_pool.create_account_pool()

# init db
if config.IS_SAVED_DATABASED:
await db.init_db()
Expand All @@ -50,7 +47,6 @@ async def main():
crawler.init_config(
platform=args.platform,
login_type=args.lt,
account_pool=account_pool,
crawler_type=args.type
)
await crawler.start()
Expand Down
41 changes: 21 additions & 20 deletions media_platform/bilibili/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
import config
from base.base_crawler import AbstractCrawler
from models import bilibili
from proxy.proxy_account_pool import AccountPool
from proxy.proxy_ip_pool import create_ip_pool, IpInfoModel
from tools import utils
from var import comment_tasks_var, crawler_type_var

Expand All @@ -31,27 +31,30 @@ class BilibiliCrawler(AbstractCrawler):
crawler_type: str
context_page: Page
bili_client: BilibiliClient
account_pool: AccountPool
browser_context: BrowserContext

def __init__(self):
self.index_url = "https://www.bilibili.com"
self.user_agent = utils.get_user_agent()

def init_config(self, platform: str, login_type: str, account_pool: AccountPool, crawler_type: str):
def init_config(self, platform: str, login_type: str, crawler_type: str):
self.platform = platform
self.login_type = login_type
self.account_pool = account_pool
self.crawler_type = crawler_type

async def start(self):
account_phone, playwright_proxy, httpx_proxy = self.create_proxy_info()
playwright_proxy_format, httpx_proxy_format = None, None
if config.ENABLE_IP_PROXY:
ip_proxy_pool = await create_ip_pool(config.IP_PROXY_POOL_COUNT, enable_validate_ip=True)
ip_proxy_info: IpInfoModel = await ip_proxy_pool.get_proxy()
playwright_proxy_format, httpx_proxy_format = self.format_proxy_info(ip_proxy_info)

async with async_playwright() as playwright:
# Launch a browser context.
chromium = playwright.chromium
self.browser_context = await self.launch_browser(
chromium,
playwright_proxy,
None,
self.user_agent,
headless=config.HEADLESS
)
Expand All @@ -61,11 +64,11 @@ async def start(self):
await self.context_page.goto(self.index_url)

# Create a client to interact with the bilibili website.
self.bili_client = await self.create_bilibili_client(httpx_proxy)
self.bili_client = await self.create_bilibili_client(httpx_proxy_format)
if not await self.bili_client.pong():
login_obj = BilibiliLogin(
login_type=self.login_type,
login_phone=account_phone,
login_phone="", # your phone number
browser_context=self.browser_context,
context_page=self.context_page,
cookie_str=config.COOKIES
Expand Down Expand Up @@ -134,20 +137,18 @@ async def create_bilibili_client(self, httpx_proxy: Optional[str]) -> BilibiliCl
)
return bilibili_client_obj

def create_proxy_info(self) -> Tuple[Optional[str], Optional[Dict], Optional[str]]:
"""Create proxy info for playwright and httpx"""
# phone: 13012345671 ip_proxy: 111.122.xx.xx1:8888
phone, ip_proxy = self.account_pool.get_account()
if not config.ENABLE_IP_PROXY:
return phone, None, None
utils.logger.info("Begin proxy info for playwright and httpx ...")
@staticmethod
def format_proxy_info(ip_proxy_info: IpInfoModel) -> Tuple[Optional[Dict], Optional[Dict]]:
"""format proxy info for playwright and httpx"""
playwright_proxy = {
"server": f"{config.IP_PROXY_PROTOCOL}{ip_proxy}",
"username": config.IP_PROXY_USER,
"password": config.IP_PROXY_PASSWORD,
"server": f"{ip_proxy_info.protocol}{ip_proxy_info.ip}:{ip_proxy_info.port}",
"username": ip_proxy_info.user,
"password": ip_proxy_info.password,
}
httpx_proxy = {
f"{ip_proxy_info.protocol}{ip_proxy_info.ip}": f"{ip_proxy_info.protocol}{ip_proxy_info.user}:{ip_proxy_info.password}@{ip_proxy_info.ip}:{ip_proxy_info.port}"
}
httpx_proxy = f"{config.IP_PROXY_PROTOCOL}{config.IP_PROXY_USER}:{config.IP_PROXY_PASSWORD}@{ip_proxy}"
return phone, playwright_proxy, httpx_proxy
return playwright_proxy, httpx_proxy

async def launch_browser(
self,
Expand Down
41 changes: 21 additions & 20 deletions media_platform/douyin/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
import config
from base.base_crawler import AbstractCrawler
from models import douyin
from proxy.proxy_account_pool import AccountPool
from proxy.proxy_ip_pool import create_ip_pool, IpInfoModel
from tools import utils
from var import crawler_type_var

Expand All @@ -24,27 +24,30 @@ class DouYinCrawler(AbstractCrawler):
crawler_type: str
context_page: Page
dy_client: DOUYINClient
account_pool: AccountPool
browser_context: BrowserContext

def __init__(self) -> None:
self.user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/109.0.0.0 Safari/537.36" # fixed
self.index_url = "https://www.douyin.com"

def init_config(self, platform: str, login_type: str, account_pool: AccountPool, crawler_type: str) -> None:
def init_config(self, platform: str, login_type: str, crawler_type: str) -> None:
self.platform = platform
self.login_type = login_type
self.account_pool = account_pool
self.crawler_type = crawler_type

async def start(self) -> None:
account_phone, playwright_proxy, httpx_proxy = self.create_proxy_info()
playwright_proxy_format, httpx_proxy_format = None, None
if config.ENABLE_IP_PROXY:
ip_proxy_pool = await create_ip_pool(config.IP_PROXY_POOL_COUNT, enable_validate_ip=True)
ip_proxy_info: IpInfoModel = await ip_proxy_pool.get_proxy()
playwright_proxy_format, httpx_proxy_format = self.format_proxy_info(ip_proxy_info)

async with async_playwright() as playwright:
# Launch a browser context.
chromium = playwright.chromium
self.browser_context = await self.launch_browser(
chromium,
playwright_proxy,
None,
self.user_agent,
headless=config.HEADLESS
)
Expand All @@ -53,11 +56,11 @@ async def start(self) -> None:
self.context_page = await self.browser_context.new_page()
await self.context_page.goto(self.index_url)

self.dy_client = await self.create_douyin_client(httpx_proxy)
self.dy_client = await self.create_douyin_client(httpx_proxy_format)
if not await self.dy_client.pong(browser_context=self.browser_context):
login_obj = DouYinLogin(
login_type=self.login_type,
login_phone=account_phone,
login_phone="", # your phone number
browser_context=self.browser_context,
context_page=self.context_page,
cookie_str=config.COOKIES
Expand Down Expand Up @@ -148,20 +151,18 @@ async def get_comments(self, aweme_id: str, semaphore: asyncio.Semaphore, max_co
except DataFetchError as e:
utils.logger.error(f"aweme_id: {aweme_id} get comments failed, error: {e}")

def create_proxy_info(self) -> Tuple[Optional[str], Optional[Dict], Optional[str]]:
"""Create proxy info for playwright and httpx"""
if not config.ENABLE_IP_PROXY:
return None, None, None

# phone: 13012345671 ip_proxy: 111.122.xx.xx1:8888
phone, ip_proxy = self.account_pool.get_account() # type: ignore
@staticmethod
def format_proxy_info(ip_proxy_info: IpInfoModel) -> Tuple[Optional[Dict], Optional[Dict]]:
"""format proxy info for playwright and httpx"""
playwright_proxy = {
"server": f"{config.IP_PROXY_PROTOCOL}{ip_proxy}",
"username": config.IP_PROXY_USER,
"password": config.IP_PROXY_PASSWORD,
"server": f"{ip_proxy_info.protocol}{ip_proxy_info.ip}:{ip_proxy_info.port}",
"username": ip_proxy_info.user,
"password": ip_proxy_info.password,
}
httpx_proxy = {
f"{ip_proxy_info.protocol}{ip_proxy_info.ip}": f"{ip_proxy_info.protocol}{ip_proxy_info.user}:{ip_proxy_info.password}@{ip_proxy_info.ip}:{ip_proxy_info.port}"
}
httpx_proxy = f"{config.IP_PROXY_PROTOCOL}{config.IP_PROXY_USER}:{config.IP_PROXY_PASSWORD}@{ip_proxy}"
return phone, playwright_proxy, httpx_proxy
return playwright_proxy, httpx_proxy

async def create_douyin_client(self, httpx_proxy: Optional[str]) -> DOUYINClient:
"""Create douyin client"""
Expand Down
41 changes: 21 additions & 20 deletions media_platform/kuaishou/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
import config
from base.base_crawler import AbstractCrawler
from models import kuaishou
from proxy.proxy_account_pool import AccountPool
from proxy.proxy_ip_pool import create_ip_pool, IpInfoModel
from tools import utils
from var import comment_tasks_var, crawler_type_var

Expand All @@ -26,27 +26,30 @@ class KuaishouCrawler(AbstractCrawler):
crawler_type: str
context_page: Page
ks_client: KuaiShouClient
account_pool: AccountPool
browser_context: BrowserContext

def __init__(self):
self.index_url = "https://www.kuaishou.com"
self.user_agent = utils.get_user_agent()

def init_config(self, platform: str, login_type: str, account_pool: AccountPool, crawler_type: str):
def init_config(self, platform: str, login_type: str, crawler_type: str):
self.platform = platform
self.login_type = login_type
self.account_pool = account_pool
self.crawler_type = crawler_type

async def start(self):
account_phone, playwright_proxy, httpx_proxy = self.create_proxy_info()
playwright_proxy_format, httpx_proxy_format = None, None
if config.ENABLE_IP_PROXY:
ip_proxy_pool = await create_ip_pool(config.IP_PROXY_POOL_COUNT, enable_validate_ip=True)
ip_proxy_info: IpInfoModel = await ip_proxy_pool.get_proxy()
playwright_proxy_format, httpx_proxy_format = self.format_proxy_info(ip_proxy_info)

async with async_playwright() as playwright:
# Launch a browser context.
chromium = playwright.chromium
self.browser_context = await self.launch_browser(
chromium,
playwright_proxy,
None,
self.user_agent,
headless=config.HEADLESS
)
Expand All @@ -56,11 +59,11 @@ async def start(self):
await self.context_page.goto(f"{self.index_url}?isHome=1")

# Create a client to interact with the kuaishou website.
self.ks_client = await self.create_ks_client(httpx_proxy)
self.ks_client = await self.create_ks_client(httpx_proxy_format)
if not await self.ks_client.pong():
login_obj = KuaishouLogin(
login_type=self.login_type,
login_phone=account_phone,
login_phone="", # your phone number
browser_context=self.browser_context,
context_page=self.context_page,
cookie_str=config.COOKIES
Expand Down Expand Up @@ -179,20 +182,18 @@ async def get_comments(self, video_id: str, semaphore: asyncio.Semaphore):
await self.context_page.goto(f"{self.index_url}?isHome=1")
await self.ks_client.update_cookies(browser_context=self.browser_context)

def create_proxy_info(self) -> Tuple[Optional[str], Optional[Dict], Optional[str]]:
"""Create proxy info for playwright and httpx"""
# phone: 13012345671 ip_proxy: 111.122.xx.xx1:8888
phone, ip_proxy = self.account_pool.get_account()
if not config.ENABLE_IP_PROXY:
return phone, None, None
utils.logger.info("Begin proxy info for playwright and httpx ...")
@staticmethod
def format_proxy_info(ip_proxy_info: IpInfoModel) -> Tuple[Optional[Dict], Optional[Dict]]:
"""format proxy info for playwright and httpx"""
playwright_proxy = {
"server": f"{config.IP_PROXY_PROTOCOL}{ip_proxy}",
"username": config.IP_PROXY_USER,
"password": config.IP_PROXY_PASSWORD,
"server": f"{ip_proxy_info.protocol}{ip_proxy_info.ip}:{ip_proxy_info.port}",
"username": ip_proxy_info.user,
"password": ip_proxy_info.password,
}
httpx_proxy = {
f"{ip_proxy_info.protocol}{ip_proxy_info.ip}": f"{ip_proxy_info.protocol}{ip_proxy_info.user}:{ip_proxy_info.password}@{ip_proxy_info.ip}:{ip_proxy_info.port}"
}
httpx_proxy = f"{config.IP_PROXY_PROTOCOL}{config.IP_PROXY_USER}:{config.IP_PROXY_PASSWORD}@{ip_proxy}"
return phone, playwright_proxy, httpx_proxy
return playwright_proxy, httpx_proxy

async def create_ks_client(self, httpx_proxy: Optional[str]) -> KuaiShouClient:
        """Create kuaishou client"""
"""Create xhs client"""
Expand Down
Loading

0 comments on commit 1cec23f

Please sign in to comment.