@@ -47,14 +47,14 @@ use url::Url;
 #[cfg(feature = "http")]
 use reqwest::Client;
 #[cfg(feature = "http")]
-use reqwest::header::UserAgent;
+use reqwest::header::USER_AGENT;
 #[cfg(feature = "http")]
 use reqwest::StatusCode;
 #[cfg(feature = "http")]
 use reqwest::Response;
 
 #[cfg(feature = "http")]
-const USER_AGENT: &str = "robotparser-rs (https://crates.io/crates/robotparser)";
+const RP_USER_AGENT: &str = "robotparser-rs (https://crates.io/crates/robotparser)";
 
 /// A rule line is a single "Allow:" (allowance==True) or "Disallow:"
 /// (allowance==False) followed by a path."""
@@ -255,8 +255,8 @@ impl<'a> RobotFileParser<'a> {
     /// Reads the robots.txt URL and feeds it to the parser.
     pub fn read(&self) {
         let client = Client::new();
-        let mut request = client.get(self.url.clone());
-        let request = request.header(UserAgent::new(USER_AGENT.to_owned()));
+        let request = client.get(self.url.clone());
+        let request = request.header(USER_AGENT, RP_USER_AGENT.to_owned());
         let mut res = match request.send() {
             Ok(res) => res,
             Err(_) => {
@@ -265,13 +265,13 @@ impl<'a> RobotFileParser<'a> {
         };
         let status = res.status();
         match status {
-            StatusCode::Unauthorized | StatusCode::Forbidden => {
+            StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => {
                 self.disallow_all.set(true);
             }
-            status if status >= StatusCode::BadRequest && status < StatusCode::InternalServerError => {
+            status if status >= StatusCode::BAD_REQUEST && status < StatusCode::INTERNAL_SERVER_ERROR => {
                 self.allow_all.set(true);
             }
-            StatusCode::Ok => self.from_response(&mut res),
+            StatusCode::OK => self.from_response(&mut res),
             _ => {}
         }
     }
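
For reference, a minimal standalone sketch (not part of the commit) of the reqwest API style this patch migrates to: typed header names become constants passed to header(name, value), and status codes become SCREAMING_SNAKE_CASE associated constants on StatusCode. It assumes the synchronous reqwest Client of the 0.9 line; the target URL and printed messages are placeholders.

use reqwest::header::USER_AGENT;
use reqwest::{Client, StatusCode};

fn main() -> Result<(), reqwest::Error> {
    let client = Client::new();
    // Header names are now passed as (name, value) pairs, replacing the
    // old struct-per-header style such as `UserAgent::new(..)`.
    let mut res = client
        .get("https://example.com/robots.txt")
        .header(USER_AGENT, "robotparser-rs (https://crates.io/crates/robotparser)")
        .send()?;
    // Status codes are associated constants rather than enum variants;
    // they still work in match patterns and ordered comparisons.
    match res.status() {
        StatusCode::UNAUTHORIZED | StatusCode::FORBIDDEN => println!("access denied"),
        status if status >= StatusCode::BAD_REQUEST && status < StatusCode::INTERNAL_SERVER_ERROR => {
            println!("client error: {}", status)
        }
        StatusCode::OK => println!("{}", res.text()?),
        _ => {}
    }
    Ok(())
}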