Commit c895399

Commit message: test

1 parent c8fcecc commit c895399

9 files changed: +514 / -133 lines changed


package.json

Lines changed: 5 additions & 2 deletions
@@ -11,8 +11,11 @@
     "build-dts": "tsc && prettier --write ./publish/src",
     "build-strict": "pnpm test-dev && pnpm build && pnpm test-pro",
     "start": "rollup --config script/start.mjs",
-    "test-dev": "jest test/modal/test.ts dev --detectOpenHandles",
-    "test-pro": "jest test/modal/test.ts pro --detectOpenHandles",
+    "test-dev": "jest test/environment/test.ts dev --detectOpenHandles",
+    "test-pro": "jest test/environment/test.ts pro --detectOpenHandles",
+    "test-crawlPage": "jest test/environment/crawlPage.test.ts dev --detectOpenHandles",
+    "test-crawlData": "jest test/environment/crawlData.test.ts dev --detectOpenHandles",
+    "test-crawlFile": "jest test/environment/crawlFile.test.ts dev --detectOpenHandles",
     "prettier": "prettier --write ."
   },
   "dependencies": {

test/environment/crawlData.test.ts

Lines changed: 152 additions & 0 deletions
@@ -0,0 +1,152 @@
import process from 'node:process'
import { expect, test, jest } from '@jest/globals'
import chalk from 'chalk'

import IXCrawl from '../../src'

const args = process.argv.slice(3)
const environment = args[0]

let xCrawl: typeof IXCrawl
if (environment === 'dev') {
  xCrawl = require('../../src').default
} else if (environment === 'pro') {
  xCrawl = require('../../publish/dist')
}

jest.setTimeout(60000)

/* 1.Written */
// 1.1.written string
async function writtenString() {
  const testXCrawl = xCrawl()

  const res = await testXCrawl.crawlData(
    'http://localhost:9001/api/room/193581217'
  )

  return res.isSuccess
}

// 1.2.written DataRequestConfig
async function writtenDataRequestConfig() {
  const testXCrawl = xCrawl()

  const res = await testXCrawl.crawlData({
    url: 'http://localhost:9001/api/room/193581217'
  })

  return res.isSuccess
}

// 1.3.written (string | DataRequestConfig)[]
async function writtenStringAndDataRequestConfigArr() {
  const testXCrawl = xCrawl()

  const res = await testXCrawl.crawlData([
    'http://localhost:9001/api/room/193581217',
    { url: 'http://localhost:9001/api/room/193581217' }
  ])

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

// 1.4.written CrawlDataConfigObject
async function writtenCrawlDataConfigObject() {
  const testXCrawl = xCrawl()

  const res = await testXCrawl.crawlData({
    requestConfigs: [
      'http://localhost:9001/api/room/193581217',
      { url: 'http://localhost:9001/api/room/193581217' }
    ]
  })

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

/* 2.Loader Config */
// 2.1.Loader Base Config
async function loaderBaseConfig() {
  const testXCrawl = xCrawl({
    baseUrl: 'http://localhost:9001/api',
    proxy: 'http://localhost:14892',
    timeout: 10000,
    intervalTime: { max: 1000 },
    maxRetry: 0
  })

  const res = await testXCrawl.crawlData(['/room/193581217', '/room/193581217'])

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

// 2.2.Loader API Config
async function loaderAPIConfig() {
  const testXCrawl = xCrawl({
    baseUrl: 'http://localhost:9001/api',
    proxy: 'http://localhost:14892'
  })

  const res = await testXCrawl.crawlData({
    requestConfigs: ['/room/193581217', '/room/193581217'],
    timeout: 10000,
    intervalTime: { max: 1000 },
    maxRetry: 0
  })

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

test('crawlData - writtenString', async () => {
  console.log(
    chalk.bgGreen('================ crawlData - writtenString ================')
  )
  await expect(writtenString()).resolves.toBe(true)
})

test('crawlData - writtenDataRequestConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlData - writtenDataRequestConfig ================'
    )
  )
  await expect(writtenDataRequestConfig()).resolves.toBe(true)
})

test('crawlData - writtenStringAndDataRequestConfigArr', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlData - writtenStringAndDataRequestConfigArr ================'
    )
  )
  await expect(writtenStringAndDataRequestConfigArr()).resolves.toBe(true)
})

test('crawlData - writtenCrawlDataConfigObject', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlData - writtenCrawlDataConfigObject ================'
    )
  )
  await expect(writtenCrawlDataConfigObject()).resolves.toBe(true)
})

/* 2.Loader Config */
test('crawlData - loaderBaseConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlData - loaderBaseConfig ================'
    )
  )
  await expect(loaderBaseConfig()).resolves.toBe(true)
})

test('crawlData - loaderAPIConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlData - loaderAPIConfig ================'
    )
  )
  await expect(loaderAPIConfig()).resolves.toBe(true)
})
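
As the helpers above show, crawlData resolves to a single result object when it is given one target (checked via res.isSuccess) and to an array of results when it is given several (checked via reduce). A small shared helper along these lines could fold both shapes into one check; this is a hypothetical sketch, not part of the commit:

// Hypothetical helper (not in the commit): collapse a single result or an
// array of results into one overall success flag.
interface SuccessLike {
  isSuccess: boolean
}

function allSucceeded(res: SuccessLike | SuccessLike[]): boolean {
  return Array.isArray(res)
    ? res.reduce((prev, item) => prev && item.isSuccess, true)
    : res.isSuccess
}

// e.g. return allSucceeded(await testXCrawl.crawlData([...]))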

test/environment/crawlFile.test.ts

Lines changed: 149 additions & 0 deletions
@@ -0,0 +1,149 @@
import process from 'node:process'
import path from 'node:path'
import { expect, test, jest } from '@jest/globals'
import chalk from 'chalk'

import IXCrawl from '../../src'

const args = process.argv.slice(3)
const environment = args[0]

let xCrawl: typeof IXCrawl
if (environment === 'dev') {
  xCrawl = require('../../src').default
} else if (environment === 'pro') {
  xCrawl = require('../../publish/dist')
}

jest.setTimeout(60000)

const requestConfig: string[] = [
  'https://raw.githubusercontent.com/coder-hxl/airbnb-upload/master/area/4401.jpg',
  'https://raw.githubusercontent.com/coder-hxl/airbnb-upload/master/area/4403.jpg'
]

const storeDir = path.resolve(__dirname, './upload')

/* 1.Written */
// 1.1.written CrawlFileRequestConfig string
async function writtenString() {
  const testXCrawl = xCrawl({ proxy: 'http://localhost:14892' })

  const res = await testXCrawl.crawlFile({
    requestConfig: requestConfig[0],
    fileConfig: { storeDir }
  })

  return res.isSuccess && res.data?.data.isSuccess
}

// 1.2.written CrawlFileRequestConfig FileRequestConfig
async function writtenFileRequestConfig() {
  const testXCrawl = xCrawl({ proxy: 'http://localhost:14892' })

  const res = await testXCrawl.crawlFile({
    requestConfig: { url: requestConfig[1] },
    fileConfig: { storeDir }
  })

  return res.isSuccess && res.data?.data.isSuccess
}

// 1.3.written CrawlFileRequestConfig (string | FileRequestConfig)[]
async function writtenStringAndFileRequestConfigArr() {
  const testXCrawl = xCrawl({ proxy: 'http://localhost:14892' })

  const res = await testXCrawl.crawlFile({
    requestConfig: [requestConfig[0], { url: requestConfig[1] }],
    fileConfig: { storeDir }
  })

  return res.reduce(
    (prev, item) => prev && item.isSuccess && !!item.data?.data.isSuccess,
    true
  )
}

/* 2.Loader Config */
// 2.1.Loader Base Config
async function loaderBaseConfig() {
  const testXCrawl = xCrawl({
    baseUrl:
      'https://raw.githubusercontent.com/coder-hxl/airbnb-upload/master/area',
    proxy: 'http://localhost:14892',
    timeout: 10000,
    intervalTime: { max: 1000 },
    maxRetry: 0
  })

  const res = await testXCrawl.crawlFile({
    requestConfig: ['/4401.jpg', '/4403.jpg'],
    fileConfig: { storeDir }
  })

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

// 2.2.Loader API Config
async function loaderAPIConfig() {
  const testXCrawl = xCrawl({
    baseUrl:
      'https://raw.githubusercontent.com/coder-hxl/airbnb-upload/master/area',
    proxy: 'http://localhost:14892'
  })

  const res = await testXCrawl.crawlFile({
    requestConfig: ['/4401.jpg', '/4403.jpg'],
    timeout: 10000,
    fileConfig: { storeDir },
    intervalTime: { max: 1000 },
    maxRetry: 0
  })

  return res.reduce((prev, item) => prev && item.isSuccess, true)
}

/* 1.Written */
test('crawlFile - writtenString', async () => {
  console.log(
    chalk.bgGreen('================ crawlFile - writtenString ================')
  )
  await expect(writtenString()).resolves.toBe(true)
})

test('crawlFile - writtenFileRequestConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlFile - writtenFileRequestConfig ================'
    )
  )
  await expect(writtenFileRequestConfig()).resolves.toBe(true)
})

test('crawlFile - writtenStringAndFileRequestConfigArr', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlFile - writtenStringAndFileRequestConfigArr ================'
    )
  )
  await expect(writtenStringAndFileRequestConfigArr()).resolves.toBe(true)
})

/* 2.Loader Config */
test('crawlFile - loaderBaseConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlFile - loaderBaseConfig ================'
    )
  )
  await expect(loaderBaseConfig()).resolves.toBe(true)
})

test('crawlFile - loaderAPIConfig', async () => {
  console.log(
    chalk.bgGreen(
      '================ crawlFile - loaderAPIConfig ================'
    )
  )
  await expect(loaderAPIConfig()).resolves.toBe(true)
})
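
The crawlFile tests download real images into test/environment/upload via the storeDir option, and the commit shows no cleanup step. If tidying that directory between runs were wanted, a Jest teardown along these lines would do it; this is a hypothetical addition, not part of the commit:

// Hypothetical teardown (not in the commit): remove the files downloaded
// by the crawlFile tests once the suite finishes.
import fs from 'node:fs'
import path from 'node:path'
import { afterAll } from '@jest/globals'

const uploadDir = path.resolve(__dirname, './upload')

afterAll(() => {
  // recursive: delete the whole tree; force: ignore a missing directory
  fs.rmSync(uploadDir, { recursive: true, force: true })
})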
