One-liners

#Get CIDR & Org Information from Target Lists

for HOST in $(cat subs.txt); do echo $(for ip in $(dig a $HOST +short); do whois $ip | grep -e "CIDR\|Organization" | tr -s " " | paste - -; done | uniq); done
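
The same lookup written out as a commented loop (this version prints one CIDR/Organization pair per line rather than joining each host's results onto a single line):

while IFS= read -r HOST; do
    for ip in $(dig a "$HOST" +short); do
        # Pull the CIDR and Organization lines from the whois record
        whois "$ip" | grep -e "CIDR\|Organization" | tr -s " " | paste - -
    done | uniq
done < subs.txt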

#Enumerate subdomains and probe live hosts

subfinder -d ibm.com -all -silent | httpx --title -mc 200 --threads 300
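
Assuming a domains.txt file with one root domain per line, the same probe scales to a whole list (the output file name live_hosts.txt is just an example):

subfinder -dL domains.txt -all -silent | httpx --title -mc 200 --threads 300 -o live_hosts.txt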

#Find every domain for a target company using crt.sh

curl -s https://crt.sh/\?o\=Tesla\&output\=json > tesla.txt
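
The backslashes only keep the shell from interpreting ?, = and &; quoting the URL does the same thing:

curl -s "https://crt.sh/?o=Tesla&output=json" > tesla.txt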

cat tesla.txt | jq -r '.[].common_name'

#Parse out wildcard domains

cat tesla.txt | jq -r '.[].common_name' | sed 's/\*//g'

#Count unique values
cat tesla.txt | jq -r '.[].common_name' | sed 's/\*//g' | sort -u | wc -l

#Extract root domains with rev

cat tesla.txt | jq -r '.[].common_name' | sed 's/\*//g' | sort -u | rev | cut -d '.' -f 1,2 | rev
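
rev reverses each name, cut keeps the last two labels, and the second rev flips them back, leaving the registrable root domain. For example, this prints tesla.com:

echo "shop.eu.tesla.com" | rev | cut -d '.' -f 1,2 | rev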

#Subdomains

curl -s https://crt.sh/\?q\=target.com\&output\=json | jq -r '.[].name_value' | grep -Po '(\w+\.\w+\.\w+)$' | anew subdomains.txt
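
The \w+\.\w+\.\w+$ pattern only keeps three-label names; a looser variant (if you want every entry) strips the wildcard prefix and dedupes instead:

curl -s https://crt.sh/\?q\=target.com\&output\=json | jq -r '.[].name_value' | sed 's/\*\.//g' | sort -u | anew subdomains.txt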

#Use crt.sh against a domain list with a bash script

#!/bin/bash

# Check if the domains.txt file exists
if [ ! -f "domains.txt" ]; then
    echo "domains.txt not found."
    exit 1
fi

# Loop through each domain in domains.txt
while IFS= read -r domain; do
    # Skip blank lines in the input list
    [ -z "$domain" ] && continue

    # Query crt.sh for the current domain
    curl_output=$(curl -s "https://crt.sh/?q=${domain}&output=json")

    # Extract subdomains using jq (errors are suppressed when crt.sh returns non-JSON, e.g. when rate limited)
    subdomains=$(echo "$curl_output" | jq -r '.[].name_value' 2>/dev/null | grep -Po '(\w+\.\w+\.\w+)$')

    # Save any results to subdomains.txt
    [ -n "$subdomains" ] && echo "$subdomains" >> subdomains.txt

done < "domains.txt"

# Remove duplicate subdomains
sort -u -o subdomains.txt subdomains.txt
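
Saved as, say, crtsh.sh (the file name is arbitrary), the script runs like this:

chmod +x crtsh.sh
./crtsh.sh
wc -l subdomains.txt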
