# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def load_data(file_name, state=None, **kw):
    """Load a JSON fixture from the package-adjacent ``data`` directory.

    :param file_name: file name located under ``<this module's dir>/data``
    :param state: optional mapping merged over the loaded document
    :param kw: extra key/value overrides merged over the result last
    :return: the loaded (and possibly augmented) dict
    """
    path = os.path.join(os.path.dirname(__file__), "data", file_name)
    # Context manager closes the handle promptly; the original leaked the
    # file object returned by open().
    with open(path) as fh:
        data = json.load(fh)
    if state:
        data.update(state)
    if kw:
        data.update(kw)
    return data
def instance(state=None, file="ec2-instance.json", **kw):
    """Convenience wrapper building an ec2 instance fixture from *file*,
    overlaying *state* and any keyword overrides."""
    return load_data(file, state, **kw)
class Instance(Bag):
    """Typed marker for a dict-like EC2 instance fixture (no extra behavior)."""
    pass
class Reservation(Bag):
    """Typed marker for a dict-like EC2 reservation fixture (no extra behavior)."""
    pass
class Client(object):
    """Minimal stub of an EC2 connection that records the filters it
    was queried with and returns its instances in one reservation."""

    def __init__(self, instances):
        self.instances = instances
        # Captured on each get_all_instances call for later inspection.
        self.filters = None

    def get_all_instances(self, filters=None):
        self.filters = filters
        bundle = {"instances": list(self.instances)}
        return [Reservation(bundle)]
def test_master_log_handler(self):
    """A master-account aws:// log sink should target the configured
    log group, with the stream named account/region/policy."""
    factory = self.replay_flight_data('test_log_handler')
    ctx = Bag(
        session_factory=factory,
        options=Bag(account_id='001100', region='us-east-1'),
        policy=Bag(name='test', resource_type='ec2'))
    sink = output.log_outputs.select(
        'aws://master/custodian?region=us-east-2', ctx)
    handler = sink.get_handler()
    self.assertTrue(handler.log_group == 'custodian')
    self.assertTrue(handler.log_stream == '001100/us-east-1/test')
def get_azure_output(self, custom_pyformat=None):
    """Build an AzureStorageOutput for a blob-container URL, optionally
    suffixed with *custom_pyformat*; its local root dir is removed on
    test cleanup."""
    output_dir = "azure://mystorage.blob.core.windows.net/logs"
    if custom_pyformat:
        output_dir = AzureStorageOutput.join(output_dir, custom_pyformat)
    ctx = ExecutionContext(
        None,
        Bag(name="xyz", provider_name='azure'),
        Config.empty(output_dir=output_dir))
    blob_output = AzureStorageOutput(ctx, {'url': output_dir})
    self.addCleanup(shutil.rmtree, blob_output.root_dir)
    return blob_output
def test_app_insights_logs(self):
    """Selecting an azure:// log sink should yield an AppInsights log
    output that accepts standard logging records."""
    policy = Bag(name='test', resource_type='azure.vm', session_factory=Session)
    ctx = Bag(policy=policy, execution_id='00000000-0000-0000-0000-000000000000')
    sink = log_outputs.select('azure://00000000-0000-0000-0000-000000000000', ctx)
    with sink as log:
        self.assertTrue(isinstance(log, AppInsightsLogOutput))
        logging.getLogger('custodian.test').warning('test message')
def test_metrics_destination_dims(self):
# Capture emitted metrics locally instead of shipping them anywhere.
tmetrics = []
class Metrics(aws.MetricsOutput):
def _put_metrics(self, ns, metrics):
tmetrics.extend(metrics)
# Destination config points at the 'master' account in us-east-2;
# the executing context itself is account 001100 / us-east-1.
conf = Bag({'region': 'us-east-2', 'scheme': 'aws', 'netloc': 'master'})
ctx = Bag(session_factory=None,
options=Bag(account_id='001100', region='us-east-1'),
policy=Bag(name='test', resource_type='ec2'))
moutput = Metrics(ctx, conf)
moutput.put_metric('Calories', 400, 'Count', Scope='Policy', Food='Pizza')
moutput.flush()
# Timestamp varies per run; drop it before comparing.
tmetrics[0].pop('Timestamp')
# NOTE(review): the expected-value literal below appears truncated in
# this chunk (the list/dict opened here is never closed before an
# unrelated fragment begins) — confirm against the original file.
self.assertEqual(tmetrics, [{
'Dimensions': [{'Name': 'Policy', 'Value': 'test'},
{'Name': 'ResType', 'Value': 'ec2'},
{'Name': 'Food', 'Value': 'Pizza'},
{'Name': 'Region', 'Value': 'us-east-1'},
{'Name': 'Account', 'Value': '001100'}],
'MetricName': 'Calories',
'Unit': 'Count',
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from mock import Mock
from c7n.config import Bag
from c7n.exceptions import PolicyValidationError
from c7n.resources import aws
from c7n import output
from .common import BaseTest
class TraceDoc(Bag):
    """Bag subclass that can render itself as a JSON trace segment."""

    def serialize(self):
        payload = dict(self)
        return json.dumps(payload)
class OutputXrayTracerTest(BaseTest):
    """Exercise the X-Ray emitter's buffering and flush behavior."""

    def test_emitter(self):
        emitter = aws.XrayEmitter()
        client = Mock()
        emitter.client = client
        doc = TraceDoc({'good': 'morning'})
        emitter.send_entity(doc)
        emitter.flush()
        client.put_trace_segments.assert_called_with(
            TraceSegmentDocuments=[doc.serialize()])
# NOTE(review): this fragment is the interior of a function whose `def`
# line is not visible in this chunk; names like `github_url`, `headers`,
# `query`, `organization`, `region`, `assume`, `hook_context`, `since`,
# `log`, and `process_commit` are bound elsewhere — confirm against the
# full file.
response = requests.post(
github_url, headers=headers,
json={'query': query, 'variables': {'organization': organization}})
result = response.json()
# GraphQL can report failures with HTTP 200 plus an 'errors' key, so
# both conditions are checked.
if response.status_code != 200 or 'errors' in result:
raise Exception(
"Query failed to run by returning code of {}. {}".format(
response.status_code, response.content))
now = datetime.utcnow().replace(tzinfo=tzutc())
stats = Counter()
repo_metrics = RepoMetrics(
Bag(session_factory=SessionFactory(region, assume_role=assume)),
{'namespace': DEFAULT_NAMESPACE}
)
# Walk every repository returned for the organization, extracting the
# commits attached to its pull requests.
for r in result['data']['organization']['repositories']['nodes']:
commits = jmespath.search(
'pullRequests.edges[].node[].commits[].nodes[].commit[]', r)
if not commits:
continue
log.debug("processing repo: %s prs: %d", r['name'], len(commits))
repo_metrics.dims = {
'Hook': hook_context,
'Repo': '{}/{}'.format(organization, r['name'])}
# Each commit represents a separate pr
for c in commits:
process_commit(c, r, repo_metrics, stats, since, now)
def parse_url_config(url):
    """Parse a ``scheme://netloc/path?query`` URL into a flat Bag holding
    the scheme, netloc, path, each query parameter (first value only),
    and the original url under 'url'."""
    if url and '://' not in url:
        # Bare names are treated as a scheme with no authority/path.
        url += "://"
    parsed = urlparse.urlparse(url)
    conf = config.Bag()
    for attr in ('scheme', 'netloc', 'path'):
        conf[attr] = getattr(parsed, attr)
    for key, values in urlparse.parse_qs(parsed.query).items():
        conf[key] = values[0]
    conf['url'] = url
    return conf