Commit

0.0.5
m8sec committed Jul 31, 2019
1 parent 4a362c7 commit 62e0d77
Showing 6 changed files with 207 additions and 10 deletions.
11 changes: 11 additions & 0 deletions .idea/crosslinked.iml

4 changes: 4 additions & 0 deletions .idea/misc.xml

8 changes: 8 additions & 0 deletions .idea/modules.xml

6 changes: 6 additions & 0 deletions .idea/vcs.xml

174 changes: 174 additions & 0 deletions .idea/workspace.xml

14 changes: 4 additions & 10 deletions crosslinked.py
@@ -1,16 +1,13 @@
#!/usr/bin/env python3
# Author: @m8r0wn

import argparse
import requests
from sys import exit
from time import sleep
from re import compile
from requests import get
from random import choice
from threading import Thread
from bs4 import BeautifulSoup
from urllib3 import disable_warnings, exceptions
disable_warnings(exceptions.InsecureRequestWarning)
requests.packages.urllib3.disable_warnings()

USER_AGENTS = [line.strip() for line in open('user_agents.txt')]
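
The import cleanup above drops the bare "from requests import get" and the manual urllib3 warning filter in favor of a single "import requests" plus the urllib3 alias that requests ships. A minimal sketch of the resulting pattern; the URL and timeout below are placeholders, not part of the commit:

    import requests

    # disable_warnings() with no arguments silences urllib3's HTTPWarning
    # family, which includes the InsecureRequestWarning raised for
    # verify=False requests, so the separate urllib3 import and explicit
    # disable_warnings(exceptions.InsecureRequestWarning) call become redundant.
    requests.packages.urllib3.disable_warnings()

    resp = requests.get('https://example.com', verify=False, timeout=10)
    print(resp.status_code)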

@@ -55,9 +52,6 @@ def search(self, search_engine, company_name, timeout, jitter):
return self.linkedin

def name_search(self, search_engine, count, company_name, jitter):
# Regex to extract link
HTTP = compile("http([^\)]+){}([^\)]+)".format(company_name))
HTTPS = compile("https([^\)]+){}([^\)]+)".format(company_name))
# Search for links in HTML
url = self.URL[search_engine].format(company_name, count)
print("[*] {} : {}".format(self.name_count, url))
@@ -144,7 +138,7 @@ def get_request(link, timeout):
'DNT': '1',
'Connection': 'keep-alive',
'Upgrade-Insecure-Requests': '1'}
return get(link, headers=head, verify=False, timeout=timeout)
return requests.get(link, headers=head, verify=False, timeout=timeout)

def email_formatter(nformat, first, last):
name = nformat
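
get_request now issues the call through requests.get on the module import instead of the bare get imported previously. A hedged usage sketch; the User-Agent selection, target URL, and timeout value are assumptions for illustration, not taken from this hunk:

    import requests
    from random import choice

    requests.packages.urllib3.disable_warnings()

    USER_AGENTS = ['Mozilla/5.0']      # placeholder; the tool loads user_agents.txt

    def get_request(link, timeout):
        # Browser-style headers; only the trailing entries appear in the hunk above
        head = {'User-Agent': choice(USER_AGENTS),
                'Connection': 'keep-alive',
                'Upgrade-Insecure-Requests': '1'}
        # requests.get replaces the previously imported bare get()
        return requests.get(link, headers=head, verify=False, timeout=timeout)

    resp = get_request('https://www.bing.com', timeout=5)
    print(resp.status_code)
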
@@ -200,4 +194,4 @@ def main(args):
main(args)
except KeyboardInterrupt:
print("[!] Key event detected, closing...")
exit(0)
exit(0)
