Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
import logging
import requests
LOG = logging.getLogger(__name__)
requests.packages.urllib3.disable_warnings()
class RestClient(object):
"""OMS RestClient
This is the client implementation based on "requests".
"""
_URL_TEMPLATE_PREFIX = "https://%s:8443/oms/%s"
def __init__(self, server, username, password):
"""Create a connection to the remote OMS server
:param server: IP or hostname of the OMS server
:param username: User name
:param password: Password
:return: None
import base64
import collections
import contextlib
import simplejson as json
import os
import re
import requests
import shutil
import tarfile
import tempfile
__version__ = "0.9.0"
requests.packages.urllib3.disable_warnings()
@contextlib.contextmanager
def pushd(path):
    """Temporarily change the current working directory to *path*.

    The original working directory is restored on exit, even if the
    body of the ``with`` block raises.

    :param path: directory to chdir into for the duration of the block
    """
    original = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Always restore the previous directory — without this, an exception
        # inside the with-body leaves the process stranded in *path*.
        os.chdir(original)
class GistInfo(collections.namedtuple("GistInfo", ["id", "public", "desc"])):
    """Immutable record for a gist: its id, public/private flag, and description."""
class authenticate(object):
"""
#!/usr/bin/env python
from __future__ import print_function
import docker, requests, os, sys, json, time, argparse
from dateutil.parser import parse as dateparse
from requests.packages.urllib3 import Retry
from colorama import init, deinit, Fore, Back, Style
from requests.packages.urllib3.exceptions import (
InsecureRequestWarning, InsecurePlatformWarning, SNIMissingWarning)
# These warning will always fire on older versions of python, we just want to
# ignore them for now.
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
requests.packages.urllib3.disable_warnings(InsecurePlatformWarning)
requests.packages.urllib3.disable_warnings(SNIMissingWarning)
__author__ = 'Steve McGrath '
__version__ = '1.0.1'
class APIError(Exception):
    """Error raised for API failures; keeps the status code and message available."""

    def __init__(self, code, msg):
        # Store both pieces separately so callers can branch on the code
        # without parsing the rendered message.
        self.code = code
        self.msg = msg

    def __str__(self):
        text = '[%s]: %s' % (self.code, self.msg)
        return repr(text)
#Author: @_tID
#This module requires TIDoS Framework
#https://github.com/the-Infected-Drake/TIDoS-Framework
import os
import time
import requests
import sys
import FileUtils
sys.path.append('lib/fileutils/')
from FileUtils import *
from colors import *
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
file_paths = []
dir_path = []
def check0x00(web, dirpath, headers):
try:
for dirs in dirpath:
web0x00 = web + dirs
req = requests.get(web0x00, headers=headers, allow_redirects=False, timeout=7, verify=False)
try:
if (req.headers['content-length'] is not None):
size = int(req.headers['content-length'])
else:
size = 0
except (KeyError, ValueError, TypeError):
def _new_session(retry_timeout_config):
    """
    Return a new `requests.Session` object.

    Both the http:// and https:// adapters share a single urllib3 retry
    policy built from *retry_timeout_config* (connect/read retry counts,
    retryable methods, and redirect cap).
    """
    # NOTE(review): urllib3's `method_whitelist` kwarg is deprecated in 1.26
    # and removed in 2.0 (renamed `allowed_methods`) — kept as-is here to
    # preserve the config object's interface.
    policy = requests.packages.urllib3.Retry(
        total=None,
        connect=retry_timeout_config.connect_retries,
        read=retry_timeout_config.read_retries,
        method_whitelist=retry_timeout_config.method_whitelist,
        redirect=retry_timeout_config.max_redirects,
    )
    session = requests.Session()
    for scheme in ('https://', 'http://'):
        session.mount(scheme, requests.adapters.HTTPAdapter(max_retries=policy))
    return session
"units2": "SEC",
"units3": "SEC",
"units4": "SEC",
"vendorid": "0"
},
"response_code": [
"200"
]
}
]
'''
import requests
from ansible.module_utils.basic import AnsibleModule, env_fallback, return_values
requests.packages.urllib3.disable_warnings()
class Netscaler(object):
"""
This is the Base Class for Netscaler modules. All methods common across several Netscaler Classes should be defined
here and inherited by the sub-class.
"""
def __init__(self, host, user, passw, use_ssl=True, verify=False, api_endpoint="", **kwargs):
"""
:param host: Type str.
The IP or resolvable hostname of the Netscaler.
:param user: Type str.
The username used to authenticate with the Netscaler.
:param passw: Type str.
The password associated with the user account.
import requests
from requests.packages.urllib3.exceptions import InsecureRequestWarning
requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
import wget
import json
from urlparse import urlparse
from bs4 import BeautifulSoup
import optparse
#Analyze metadata pdf
import PyPDF2
from PyPDF2 import PdfFileReader
#Analyze metadata docx
import docx
import datetime
#Parser arguments
import argparse
from argparse import RawTextHelpFormatter
def __init__(self, search_domain, show_expired):
    """Record the query parameters and prime the paging state for the
    Google Certificate Transparency search endpoints."""
    self.search_domain = search_domain
    self.show_expired = show_expired
    # Accumulated results and the continuation token for paged queries.
    self.domains = {}
    self.page_token = ''
    # Browser-like User-Agent so the transparency-report API serves
    # normal responses.
    self.headers = {"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36"}
    self.indexUrl = 'https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch?include_subdomains=true'
    self.nextUrl = 'https://transparencyreport.google.com/transparencyreport/api/v3/httpsreport/ct/certsearch/page?p='
    # Optional local proxy, kept commented out for debugging:
    # self.proxies = {
    #     'http': 'http://127.0.0.1:1087',
    #     'https': 'http://127.0.0.1:1087',
    # }
requests.packages.urllib3.disable_warnings()
#!/usr/bin/python
# Tested on Python 2.7 and 3.5
import csv
import sys
import argparse
import requests
import re
import os
from termcolor import colored
from bs4 import BeautifulSoup
from config import *
requests.packages.urllib3.disable_warnings()
download_link = 'https://download.maxmind.com/app/geoip_download?edition_id=GeoLite2-ASN-CSV&license_key={}&suffix=zip'.format(license_key)
def banner():
    """Print the tool's ASCII-art title and author credit to stdout."""
    art = '''
____ ____ _ _ _ ____ ____ _ _ _ _ ___
|__| [__ |\ | | | | | | |_/ | | |__]
| | ___] | \| |___ |__| |__| | \_ |__| |
Author: Yassine Aboukir (@yassineaboukir)\n'''
    print(art)
def parse_args():
# parse the argument
parser = argparse.ArgumentParser(epilog='\tExample: \r\npython ' + sys.argv[0] + " -o twitter")
org = parser.add_argument('-o', '--org', help="Organization to look up", required=True)
nmapscan = parser.add_argument('-n', '--nmapscan', help="Run Nmap", required=False, action="store", nargs='?', const="-p 1-65535 -T4 -A -v")
try:
response = requests.get(url, headers=headers, timeout=timeout)
if response.ok:
page_exists = True
content = response.content
else:
error = 'HTTP error: {} {}'.format(response.status_code, response.reason.title())
except requests.exceptions.SSLError as e:
error = "SSL error: {}".format(e)
except requests.exceptions.HTTPError as e:
error = 'HTTP error: {}'.format(e)
except requests.exceptions.ConnectionError as e:
# Prettify common connection errors
if hasattr(e, 'args') and len(e.args) > 0:
if type(e.args[0]) == requests.packages.urllib3.exceptions.MaxRetryError:
reason = e.args[0].reason
if type(reason) == requests.packages.urllib3.exceptions.NewConnectionError:
if hasattr(reason, 'args') and len(reason.args) > 0:
if type(reason.args[0]) == str:
message = reason.args[0]
# Filter DNS lookup error from other connection errors
# (until https://github.com/shazow/urllib3/issues/1003 is resolved)
if ("[Errno 11001] getaddrinfo failed" in message or # Windows
"[Errno -2] Name or service not known" in message or # Linux
"[Errno 8] nodename nor servname " in message): # OS X
error = 'Connection error: DNS lookup error'
else:
error = 'Connection error{}'.format(message[message.find(':'):])
if type(reason) == requests.packages.urllib3.exceptions.ConnectTimeoutError:
if hasattr(reason, 'args') and len(reason.args) > 0:
if type(reason.args[0]) == requests.packages.urllib3.connection.HTTPConnection: