用R爬取多页数据时,为什么爬取几页以后就不能继续了?

library(XML)
library(RCurl)
library(rvest)
library(Rwebdriver)
library(RJSONIO)
library(RSelenium)
library(stringr)
library(xlsx)

# 打开浏览器 (open the browser)

# Connect to a Selenium server running locally on port 4444 and
# launch a Firefox session that the rest of the script will drive.
selenium_host <- "localhost"
selenium_port <- 4444L

remDr <- remoteDriver(
  remoteServerAddr = selenium_host,
  port             = selenium_port,
  browserName      = "firefox"
)
remDr$open()

# Scrape up to 80 listing pages and accumulate one row per restaurant.
#
# Fixes over the original:
#   * The page HTML is taken from the Selenium browser via getPageSource();
#     the original called read_html(url), which re-fetched the page with a
#     plain HTTP request, bypassing the browser session entirely. Combined
#     with the disabled Sys.sleep(), that rapid-fire fetching is the usual
#     reason this kind of crawl gets blocked after a few pages.
#   * The inner loop no longer reuses the outer index `i`.
#   * Rows are collected in a list and bound once instead of rbind()-ing
#     a growing data frame on every iteration (O(n^2)).
#   * Pages are iterated with seq_along() over what was actually found
#     rather than assuming exactly 15 listings per page.
#   * A failing page is logged and skipped instead of killing the run.
pages <- list()
for (page_no in 1:80) {
  url <- paste0(
    "https://you.ctrip.com/restaurantlist/lanzhou231/s0-r799-p",
    page_no, ".html"
  )

  page_rows <- tryCatch({
    # Open the page in the browser session.
    remDr$navigate(url)
    Sys.sleep(2)  # throttle between requests to avoid being blocked

    # Parse the HTML the browser actually rendered.
    page_source <- remDr$getPageSource()[[1]]
    web <- read_html(page_source, encoding = "UTF-8")
    position <- web %>% html_nodes("div.list_mod2") %>% html_text()

    # Note the distinct inner index `j` (the original clobbered `i`).
    rows <- lapply(seq_along(position), function(j) {
      flat  <- gsub("\n", "", position[j])
      parts <- str_split_fixed(flat, " ", 807)  # split on spaces
      # NOTE(review): these fixed offsets depend on the page's exact
      # whitespace layout and will break silently if it changes;
      # extracting each field with its own CSS selector would be
      # far more robust.
      data.frame(
        Name    = parts[357],
        Address = parts[541],
        Average = parts[589],
        Value   = parts[629],
        Comment = parts[717]
      )
    })
    if (length(rows) > 0) do.call(rbind, rows) else NULL
  },
  error = function(e) {
    warning("Page ", page_no, " failed: ", conditionMessage(e),
            call. = FALSE)
    NULL
  })

  if (!is.null(page_rows)) {
    pages <- c(pages, list(page_rows))
  }
  message("page ", page_no, " done; total rows so far: ",
          sum(vapply(pages, nrow, integer(1))))
}

# Bind everything once; fall back to an empty data frame if nothing
# was scraped so downstream code still sees a data.frame in X.
X <- if (length(pages) > 0) do.call(rbind, pages) else data.frame()
print(X)
remDr$close()  # shut down the browser session

# Source: https://blog.csdn.net/Hooah_/article/details/89578777