From 382332a2e77c6a51b8dd1198148992c455a846c2 Mon Sep 17 00:00:00 2001 From: 0xflotus <0xflotus@gmail.com> Date: Tue, 3 Dec 2024 14:43:17 +0100 Subject: [PATCH 1/2] fix: small typo errors --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index dbd3011..5faaff8 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ SpideyX - A Web Reconnaissance Penetration Testing tool for Penetration Testers and Ethical Hackers that included with multiple -mode with asynchronous concurrne performance. Spideyx is a tool that have 3 seperate modes and each mode are used for different +mode with asynchronous concurrent performance. Spideyx is a tool that have 3 separate modes and each mode are used for different approach and different methods, Spideyx is one tool but it equal to 3 tools because it have ability to crawling, Jsscraping, parameter fuzzing. --- @@ -29,7 +29,7 @@ git clone https://github.com/RevoltSecurities/SpideyX cd Spideyx pip install . 
``` -these are methods that install spideyx succesfully into your machiene and ready to execute, but how to use the spideyx +these are methods that install spideyx successfully into your machine and ready to execute, but how to use the spideyx --- @@ -82,7 +82,7 @@ spideyx -h ### SpideyX Modes: - SpideyX have 3 different mode that used for 3 different purposes and thse are the modes of SpideX: + SpideyX have 3 different mode that used for 3 different purposes and these are the modes of SpideX: - crawler - jsscrapy - paramfuzzer @@ -157,7 +157,7 @@ spideyx crawler -h [scope]: -hic, --host-include : specify hosts to include urls of it and show in results with comma seperated values (ex: -hc api.google.com,admin.google.com) - -hex, --host-exclude : speify hosts to exclude urls of it and show in results with comma seperated values (ex: -hex class.google.com,nothing.google.com) + -hex, --host-exclude : specify hosts to exclude urls of it and show in results with comma separated values (ex: -hex class.google.com,nothing.google.com) -cs, --crawl-scope : specify the inscope url to be crawled by spideyx (ex: -cs /api/products or -cs inscope.txt) -cos, --crawl-out-scope : specify the outscope url to be not crawled by spideyx (ex: -cos /api/products or -cos outscope.txt) @@ -231,7 +231,7 @@ spideyx crawler -site https://hackerone.com -em .js,.jsp,.asp,.apsx,.php ``` --- -and you can filter extension using falgs: `-ef`, `--extension-filters` or spideyx have predefined blacklist of extensions not to crawl or include in output. +and you can filter extension using flags: `-ef`, `--extension-filters` or spideyx have predefined blacklist of extensions not to crawl or include in output. 
```sh spideyx crawler -site https://hackerone.com -ef .css,.woff,.woff2,.mp3,.mp4,.pdf @@ -297,7 +297,7 @@ Controlling your scope with SpideyX is very easy using: `-cs` & `--crawl-scope` ```sh spideyx crawler -site https://random.hackerone.com -cs /wp-admin ``` -and for multiple inscope deinition you can pass the file `scope.txt` +and for multiple inscope definition you can pass the file `scope.txt` ```sh cat scope.txt /admin/api/v1 @@ -321,7 +321,7 @@ spideyx crawler -site https://random.hackerone.com -cos outscope.txt #### Spideyx crawl mode concurrency: -Spidex concurrency can be controlled to instruct spidey how much concurrent, delay and paralellism to used +Spidex concurrency can be controlled to instruct spidey how much concurrent, delay and parallelism to used when crawling and this can be done easily in spideyx crawl mode! --- From c952e9f829aefe27a35ec85cc5e73a4ae5dfbc65 Mon Sep 17 00:00:00 2001 From: 0xflotus <0xflotus@gmail.com> Date: Tue, 3 Dec 2024 14:44:47 +0100 Subject: [PATCH 2/2] fix: small typo error --- spideyx/modules/help/help.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/spideyx/modules/help/help.py b/spideyx/modules/help/help.py index 20e9cf1..9e3b81b 100644 --- a/spideyx/modules/help/help.py +++ b/spideyx/modules/help/help.py @@ -79,7 +79,7 @@ def crawler_help(): [{bold}{blue}scope{reset}{bold}{white}]: -hic, --host-include : specify hosts to include urls of it and show in results with comma seperated values (ex: -hc api.google.com,admin.google.com) - -hex, --host-exclude : speify hosts to exclude urls of it and show in results with comma seperated values (ex: -hex class.google.com,nothing.google.com) + -hex, --host-exclude : specify hosts to exclude urls of it and show in results with comma separated values (ex: -hex class.google.com,nothing.google.com) -cs, --crawl-scope : specify the inscope url to be crawled by spideyx (ex: -cs /api/products or -cs inscope.txt) -cos, --crawl-out-scope : specify the outscope 
url to be not crawled by spideyx (ex: -cos /api/products or -cos outscope.txt) @@ -204,4 +204,4 @@ def update_help(): [{bold}{blue}commands{reset}{bold}{white}]: -sup, --show-update : shows latest version updates of spideyX - -lt, --latest : updates the spideyx to latest version{reset}""") \ No newline at end of file + -lt, --latest : updates the spideyx to latest version{reset}""")