Issue #122: Use configuration class for click parameters
Nekmo committed Aug 8, 2023
1 parent bbd2fc8 · commit 9f0ed72
Showing 2 changed files with 72 additions and 101 deletions.
60 changes: 60 additions & 0 deletions dirhunt/configuration.py
@@ -0,0 +1,60 @@
+from dataclasses import dataclass, field
+from typing import TypedDict, List, Optional, Dict
+
+
+class ConfigurationDict(TypedDict):
+    """Configuration dict for Dirhunt. The keys are the same as the
+    command line arguments of Dirhunt. See the management.py file for more
+    information.
+    """
+
+    urls: List[str]
+    threads: int
+    exclude_flags: List[str]
+    include_flags: List[str]
+    interesting_extensions: List[str]
+    interesting_files: List[str]
+    interesting_keywords: List[str]
+    stdout_flags: List[str]
+    progress_enabled: bool
+    timeout: int
+    max_depth: int
+    not_follow_subdomains: bool
+    exclude_sources: List[str]
+    proxies: List[str]
+    delay: int
+    not_allow_redirects: bool
+    limit: int
+    to_file: Optional[str]
+    user_agent: Optional[str]
+    cookies: Dict[str, str]
+    headers: Dict[str, str]
+
+
+@dataclass
+class Configuration:
+    """Configuration class for Dirhunt. The keys are the same as the ConfigurationDict
+    class.
+    """
+
+    urls: List[str] = field(default_factory=list)
+    threads: int = 10
+    exclude_flags: List[str] = field(default_factory=list)
+    include_flags: List[str] = field(default_factory=list)
+    interesting_extensions: List[str] = field(default_factory=list)
+    interesting_files: List[str] = field(default_factory=list)
+    interesting_keywords: List[str] = field(default_factory=list)
+    stdout_flags: List[str] = field(default_factory=list)
+    progress_enabled: bool = True
+    timeout: int = 10
+    max_depth: int = 3
+    not_follow_subdomains: bool = False
+    exclude_sources: List[str] = field(default_factory=list)
+    proxies: List[str] = field(default_factory=list)
+    delay: int = 0
+    not_allow_redirects: bool = False
+    limit: int = 1000
+    to_file: Optional[str] = None
+    user_agent: Optional[str] = None
+    cookies: Dict[str, str] = field(default_factory=dict)
+    headers: Dict[str, str] = field(default_factory=dict)
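
The dataclass stores plain defaults (like timeout = 10) as class attributes and puts mutable defaults behind default_factory, so each instance gets its own lists and dicts. A minimal usage sketch of the new class, not part of the commit and runnable wherever this branch of dirhunt is importable:

```python
# Sketch only: exercises the defaults declared in dirhunt/configuration.py.
from dirhunt.configuration import Configuration

config = Configuration(urls=["http://example.com"], threads=20)

assert config.timeout == 10                           # plain default
assert Configuration.timeout == 10                    # also readable as a class attribute
assert config.proxies == []                           # default_factory: fresh list per instance
assert Configuration().proxies is not config.proxies  # no shared mutable state
```

The class-attribute access is what the click decorators below rely on; it works only for fields with plain defaults, since default_factory fields leave no readable class attribute behind.
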
113 changes: 12 additions & 101 deletions dirhunt/management.py
@@ -10,6 +10,7 @@

 from click import BadOptionUsage, Path, BadParameter

+from dirhunt.configuration import ConfigurationDict, Configuration
 from dirhunt.crawler import Crawler
 from dirhunt.exceptions import DirHuntError, catch, IncompatibleVersionError
 from dirhunt.output import output_urls
@@ -180,10 +181,10 @@ def flags_range(flags):
     help="Return only in stdout the urls of these flags",
 )
 @click.option("--progress-enabled/--progress-disabled", default=None)
-@click.option("--timeout", default=10)
+@click.option("--timeout", default=Configuration.timeout)
 @click.option(
     "--max-depth",
-    default=3,
+    default=Configuration.max_depth,
     help="Maximum links to follow without increasing directories depth",
 )
 @click.option(
@@ -205,7 +206,7 @@ def flags_range(flags):
 @click.option(
     "-d",
     "--delay",
-    default=0,
+    default=Configuration.delay,
     type=float,
     help="Delay between requests to avoid bans by the server",
 )
@@ -215,7 +216,7 @@ def flags_range(flags):
 @click.option(
     "--limit",
     type=int,
-    default=1000,
+    default=Configuration.limit,
     help="Max number of pages processed to search for directories.",
 )
 @click.option(
@@ -250,105 +251,15 @@ def flags_range(flags):
 @click.option(
     "--version", is_flag=True, callback=print_version, expose_value=False, is_eager=True
 )
-def hunt(
-    urls,
-    threads,
-    exclude_flags,
-    include_flags,
-    interesting_extensions,
-    interesting_files,
-    interesting_keywords,
-    stdout_flags,
-    progress_enabled,
-    timeout,
-    max_depth,
-    not_follow_subdomains,
-    exclude_sources,
-    proxies,
-    delay,
-    not_allow_redirects,
-    limit,
-    to_file,
-    user_agent,
-    cookies,
-    headers,
-):
+def hunt(**kwargs: ConfigurationDict):
     """Find web directories without bruteforce"""
-    if exclude_flags and include_flags:
-        raise BadOptionUsage(
-            "--exclude-flags and --include-flags are mutually exclusive."
-        )
-    welcome()
-    urls = flat_list(urls)
-    proxies = multiplier_args(proxies)
-    if not urls:
-        click.echo(
-            "•_•) OOPS! Add urls to analyze.\nFor example: dirhunt http://domain/path\n\n"
-            "Need help? Then use dirhunt --help",
-            err=True,
-        )
-        return
-    exclude_flags, include_flags = flags_range(exclude_flags), flags_range(
-        include_flags
-    )
-    progress_enabled = (
-        (sys.stdout.isatty() or sys.stderr.isatty())
-        if progress_enabled is None
-        else progress_enabled
-    )
-    crawler = Crawler(
-        max_workers=threads,
-        interesting_extensions=interesting_extensions,
-        interesting_files=interesting_files,
-        interesting_keywords=interesting_keywords,
-        std=sys.stdout if sys.stdout.isatty() else sys.stderr,
-        progress_enabled=progress_enabled,
-        timeout=timeout,
-        depth=max_depth,
-        not_follow_subdomains=not_follow_subdomains,
-        exclude_sources=exclude_sources,
-        not_allow_redirects=not_allow_redirects,
-        proxies=proxies,
-        delay=delay,
-        limit=limit,
-        to_file=to_file,
-        user_agent=user_agent,
-        cookies=cookies,
-        headers=headers,
-    )
-    crawler.add_init_urls(*urls)
-    if os.path.exists(crawler.get_resume_file()):
-        click.echo("Resuming the previous program execution...")
-        try:
-            crawler.resume(crawler.get_resume_file())
-        except IncompatibleVersionError as e:
-            click.echo(e)
-    while True:
-        choice = catch_keyboard_interrupt_choices(
-            crawler.print_results, ["abort", "continue", "results"], "a"
-        )(set(exclude_flags), set(include_flags))
-        if choice == "a":
-            crawler.close(True)
-            click.echo(
-                'Created resume file "{}". Run again using the same parameters to resume.'.format(
-                    crawler.get_resume_file()
-                )
-            )
-            return
-        elif choice == "c":
-            crawler.restart()
-            continue
-        else:
-            break
-    crawler.print_urls_info()
-    if not sys.stdout.isatty():
-        output_urls(crawler, stdout_flags)
-    if to_file:
-        crawler.create_report(to_file)
-    if not to_file and os.path.exists(crawler.get_resume_file()):
-        # The resume file exists. Deleting...
-        os.remove(crawler.get_resume_file())
+    configuration = Configuration(**kwargs)
+    pass


 def main():
     catch(hunt)()


 if __name__ == "__main__":
     main()
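
Taken together, the pattern is: the dataclass is the single source of truth for defaults, the @click.option decorators read those defaults as class attributes, and hunt() rebuilds the dataclass from click's parsed keyword arguments. A self-contained sketch of the same wiring, with hypothetical Config and run names and only two options:

```python
# Standalone sketch (not dirhunt code): dataclass defaults feed click options,
# and the command rebuilds the dataclass from the parsed keyword arguments.
from dataclasses import dataclass

import click


@dataclass
class Config:
    timeout: int = 10   # values mirror Configuration's plain defaults
    limit: int = 1000


@click.command()
@click.option("--timeout", type=int, default=Config.timeout)
@click.option("--limit", type=int, default=Config.limit)
def run(**kwargs):
    config = Config(**kwargs)  # click passes each option as a keyword argument
    click.echo(f"timeout={config.timeout} limit={config.limit}")


if __name__ == "__main__":
    run()
```

One caveat on the new signature: annotating **kwargs directly with ConfigurationDict tells a type checker that each individual keyword value is a ConfigurationDict; expressing "the keyword arguments match this TypedDict" takes Unpack[ConfigurationDict] from PEP 692.
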
