@@ -4,23 +4,21 @@
 - Make requests to the domains retrieved from a file
 - Recursively download the site when an open directory hosting a file with the desired file extension is found

-3 positional arguments needed:
+1 positional argument needed:
 - Input File : Path to the file containing URLs
-- File Extension : 7z, apk, bat, bz, bz2, crypt, dll, doc, docx, exe, gz, hta, iso, jar, json, lnk, ppt, ps1, py, rar, sfx, sh, tar, vb, vbs, xld, xls, xlsx, zip

 Optional arguments:
-- --file-dir : Directory to use for interesting files detected (default: ./InterestingFiles/)
-- --kit-dir : Directory to use for phishing kits detected (default: ./KitJackinSeason/)
+- --directory : Save data to CAP_DIR (default: ./Captures/)
 - --level : Recursion depth (default=1, infinite=0)
 - --quiet : Don't show wget output
 - --threads : Number of threads to spawn
-- --timeout : Set time to wait for a connection
+- --timeout : Set the connection timeout to TIMEOUT
 - --tor : Download files via the Tor network
 - --very-verbose : Show error messages

 Usage:
 ```
-python aa_adhoc.py <INPUT_FILE> <FILE_EXTENSION> [--file-dir] [--kit-dir] [--level] [--quiet] [--threads] [--timeout] [--tor] [--very-verbose]
+python aa_adhoc.py <INPUT_FILE> [--directory] [--level] [--quiet] [--threads] [--timeout] [--tor] [--very-verbose]
 ```

 Debugger: open("/tmp/aa.txt", "a").write("{}: <MSG>\n".format(<VAR>))
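For reference, an invocation under the post-commit interface might look like the following; the input file name is illustrative, and each line of that file is expected to be a URL:

```
python aa_adhoc.py urls.txt --directory ./Captures/ --level 1 --threads 3 --timeout 30 --tor
```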
@@ -36,23 +34,14 @@
 parser.add_argument(metavar="input file",
                     dest="input_file",
                     help="Path to the file containing URLs")
-parser.add_argument(metavar="file extension",
-                    dest="ext",
-                    choices=["7z", "apk", "bat", "bz", "bz2", "crypt", "dll", "doc", "docx", "exe", "gz", "hta", "iso", "jar", "json", "lnk", "ppt", "ps1", "py", "rar", "sfx", "sh", "tar", "vb", "vbs", "xld", "xls", "xlsx", "zip"],
-                    help="7z, apk, bat, bz, bz2, crypt, dll, doc, docx, exe, gz, hta, iso, jar, json, lnk, ppt, ps1, py, rar, sfx, sh, tar, vb, vbs, xld, xls, xlsx, zip")
-parser.add_argument("--file-dir",
-                    dest="file_dir",
-                    default="./InterestingFile/",
+parser.add_argument("--directory",
+                    dest="cap_dir",
+                    default="./Captures/",
                     required=False,
-                    help="Directory to use for interesting files detected (default: ./InterestingFiles)")
-parser.add_argument("--kit-dir",
-                    dest="kit_dir",
-                    default="./KitJackinSeason/",
-                    required=False,
-                    help="Directory to use for phishing kits detected (default: ./KitJackinSeason)")
+                    help="Download data to CAP_DIR (default: ./Captures)")
 parser.add_argument("--level",
                     dest="level",
-                    default=0,
+                    default=1,
                     required=False,
                     type=str,
                     help="Recursion depth (default=1, infinite=0)")
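The --quiet help text ("Don't show wget output") indicates downloads are shelled out to wget, whose --level flag treats 0 as infinite recursion, matching the "default=1, infinite=0" note above. Here is a minimal sketch of how the parsed arguments might be mapped onto a wget command; the helper name and exact flag set are assumptions, not code from this commit:

```python
import subprocess

def build_wget_command(url, args):
    """Hypothetical helper: map parsed args onto wget flags.

    wget treats --level=0 as infinite recursion, which matches the
    "default=1, infinite=0" help text above.
    """
    command = [
        "wget",
        "--recursive",                                 # walk the open directory
        "--level={}".format(args.level),               # recursion depth (0 = infinite)
        "--timeout={}".format(args.timeout),           # connection timeout in seconds
        "--directory-prefix={}".format(args.cap_dir),  # save data under CAP_DIR
        url,
    ]

    if args.quiet:
        command.insert(1, "--quiet")                   # suppress wget output

    return command

# Example: subprocess.call(build_wget_command("http://example.com/files/", args))
```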
@@ -72,7 +61,7 @@
                     default=30,
                     required=False,
                     type=int,
-                    help="Set time to wait for a connection")
+                    help="Set the connection timeout to TIMEOUT")
 parser.add_argument("--tor",
                     dest="tor",
                     action="store_true",
@@ -83,11 +72,8 @@
                     action="store_true",
                     required=False,
                     help="Show error messages")
-args = parser.parse_args()
-uagent = "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko"
-
 # Fix directory names
-args = commons.fix_directory(args)
+args = commons.fix_directory(parser.parse_args())

 def main():
     """ """
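commons.fix_directory is referenced here but is not part of this diff. Given that the help strings above mix ./Captures/ and ./Captures, a plausible job for it is normalizing the trailing separator and creating the directory; this is a hypothetical sketch, not the project's actual helper:

```python
import os

def fix_directory(args):
    """Hypothetical sketch of commons.fix_directory (not in this diff):
    normalize cap_dir to exactly one trailing separator and create it."""
    args.cap_dir = os.path.join(args.cap_dir, "")  # "./Captures" -> "./Captures/"

    if not os.path.exists(args.cap_dir):
        os.makedirs(args.cap_dir)

    return args
```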
@@ -96,36 +82,28 @@ def main():

     # Print start messages
     commons.show_summary(args)
-    commons.show_networking(args, uagent)
+    commons.show_networking(args)  # globals: proxies, torsocks

-    # Read suspicious.yaml
-    suspicious = commons.read_suspicious(args)
+    # Read config.yaml
+    commons.read_config(args)  # globals: config

     # Recompile exclusions
-    commons.recompile_exclusions()
-
-    # Build dict of extensions
-    extensions = {}
-    extensions.update(suspicious["archives"])
-    extensions.update(suspicious["files"])
-
-    # Read file containing URLs
-    urls = commons.read_file(args.input_file)
+    commons.recompile_exclusions()  # globals: exclusions

     # Create queues
-    recursion_queue = commons.create_queue("recursion_queue")
+    url_queue = commons.create_queue("url_queue")

     # Create threads
-    commons.RecursiveQueueManager(args, recursion_queue, uagent, extensions)
+    commons.UrlQueueManager(args, url_queue)
+
+    # Read file containing URLs
+    urls = commons.read_file(args.input_file)

     # Process URLs
     for url in urls:
-        if not (url.startswith("http://") or url.startswith("https://")):
-            continue
-
-        recursion_queue.put(url)
+        url_queue.put(url)

-    recursion_queue.join()
+    url_queue.join()
     return

 if __name__ == "__main__":
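The commons queue helpers are also outside this diff, but the shape of main() after this commit (create a joinable queue, start a manager with worker threads, put() each URL, then join()) is the standard queue.Queue producer/consumer pattern. A self-contained sketch of that pattern follows; every name below is an illustrative stand-in, not the real commons API:

```python
import queue
import threading

def create_queue(name):
    """Illustrative stand-in for commons.create_queue: a joinable FIFO queue."""
    print("Created {}".format(name))
    return queue.Queue()

class UrlQueueManager:
    """Illustrative stand-in for commons.UrlQueueManager: spawns daemon
    workers that consume URLs until the producer's join() unblocks."""

    def __init__(self, args, url_queue):
        self.args = args
        self.url_queue = url_queue

        for _ in range(int(args.threads)):
            worker = threading.Thread(target=self.process)
            worker.daemon = True  # lets the process exit once url_queue.join() returns
            worker.start()

    def process(self):
        while True:
            url = self.url_queue.get()
            try:
                print("Processing {}".format(url))  # the real worker would download here
            finally:
                self.url_queue.task_done()  # one task_done() per get() unblocks join()
```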