-use std::env;
+//! HTTP Tracker client:
+//!
+//! Examples:
+//!
+//! `Announce` request:
+//!
+//! ```text
+//! cargo run --bin http_tracker_client announce http://127.0.0.1:7070 9c38422213e30bff212b30c360d26f9a02136422 | jq
+//! ```
+//!
+//! `Scrape` request:
+//!
+//! ```text
+//! cargo run --bin http_tracker_client scrape http://127.0.0.1:7070 9c38422213e30bff212b30c360d26f9a02136422 | jq
+//! ```
 use std::str::FromStr;
 
+use clap::{Parser, Subcommand};
 use reqwest::Url;
 use torrust_tracker::shared::bit_torrent::info_hash::InfoHash;
 use torrust_tracker::shared::bit_torrent::tracker::http::client::requests::announce::QueryBuilder;
 use torrust_tracker::shared::bit_torrent::tracker::http::client::responses::announce::Announce;
 use torrust_tracker::shared::bit_torrent::tracker::http::client::Client;
 
+#[derive(Parser, Debug)]
+#[command(author, version, about, long_about = None)]
+struct Args {
+    #[command(subcommand)]
+    command: Command,
+}
+
+#[derive(Subcommand, Debug)]
+enum Command {
+    Announce { tracker_url: String, info_hash: String },
+    Scrape { tracker_url: String, info_hashes: Vec<String> },
+}
+
 #[tokio::main]
 async fn main() {
-    let args: Vec<String> = env::args().collect();
-    if args.len() != 3 {
-        eprintln!("Error: invalid number of arguments!");
-        eprintln!("Usage: cargo run --bin http_tracker_client <HTTP_TRACKER_URL> <INFO_HASH>");
-        eprintln!("Example: cargo run --bin http_tracker_client https://tracker.torrust-demo.com 9c38422213e30bff212b30c360d26f9a02136422");
-        std::process::exit(1);
+    let args = Args::parse();
+
+    match args.command {
+        Command::Announce { tracker_url, info_hash } => {
+            announce_command(tracker_url, info_hash).await;
+        }
+        Command::Scrape {
+            tracker_url,
+            info_hashes,
+        } => {
+            scrape_command(&tracker_url, &info_hashes);
+        }
     }
+}
 
-    let base_url = Url::parse(&args[1]).expect("arg 1 should be a valid HTTP tracker base URL");
-    let info_hash = InfoHash::from_str(&args[2]).expect("arg 2 should be a valid infohash");
+async fn announce_command(tracker_url: String, info_hash: String) {
+    let base_url = Url::parse(&tracker_url).expect("Invalid HTTP tracker base URL");
+    let info_hash = InfoHash::from_str(&info_hash).expect("Invalid infohash");
 
     let response = Client::new(base_url)
         .announce(&QueryBuilder::with_default_values().with_info_hash(&info_hash).query())
@@ -31,5 +67,11 @@ async fn main() {
 
     let json = serde_json::to_string(&announce_response).expect("announce response should be a valid JSON");
 
-    print!("{json}");
+    println!("{json}");
+}
+
+fn scrape_command(tracker_url: &str, info_hashes: &[String]) {
+    println!("URL: {tracker_url}");
+    println!("Infohashes: {info_hashes:#?}");
+    todo!();
 }
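
For reviewers wondering where the `Scrape` variant is headed: below is a minimal, hypothetical sketch (not part of this diff) of how `scrape_command` could validate its arguments the same way `announce_command` does, using only the `Url::parse` and `InfoHash::from_str` calls already present in this file. The actual HTTP scrape request stays unimplemented, matching the `todo!()` above.

```rust
// Hypothetical sketch only: argument validation for the future scrape path.
// The network call itself is still left unimplemented, as in the diff above.
use std::str::FromStr;

use reqwest::Url;
use torrust_tracker::shared::bit_torrent::info_hash::InfoHash;

fn scrape_command(tracker_url: &str, info_hashes: &[String]) {
    // Validate the tracker URL up front, mirroring `announce_command`.
    let _base_url = Url::parse(tracker_url).expect("Invalid HTTP tracker base URL");

    // Validate every infohash before any request would be attempted.
    let _info_hashes: Vec<InfoHash> = info_hashes
        .iter()
        .map(|raw| InfoHash::from_str(raw).expect("Invalid infohash"))
        .collect();

    // Building and sending the scrape request is out of scope for this change.
    todo!();
}
```

Failing fast on malformed arguments before any network I/O would keep the scrape path consistent with the announce path introduced in this change.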