How to use the awscli.customizations.s3.fileinfo.FileInfo class in awscli

To help you get started, we’ve selected a few awscli examples based on popular ways FileInfo is used in public projects.
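
Before diving in, here is a minimal sketch of constructing a FileInfo by hand. The bucket, key, and local path are placeholder values, and the keyword arguments are the same ones used throughout the snippets below.

from awscli.customizations.s3.fileinfo import FileInfo

# Placeholder values standing in for a real bucket, key, and local file.
bucket = 'mybucket'
key = 'prefix/data.txt'
local_path = '/tmp/data.txt'

# A FileInfo describes a single pending transfer: its source and
# destination, their types, and the operation to perform on them.
fileinfo = FileInfo(
    src=local_path, src_type='local',
    dest=bucket + '/' + key, dest_type='s3',
    operation_name='upload')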

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_can_submit(self):
        fileinfo = FileInfo(
            src=self.bucket+'/'+self.key, dest=self.filename,
            operation_name='download')
        self.assertTrue(
            self.transfer_request_submitter.can_submit(fileinfo))
        fileinfo.operation_name = 'foo'
        self.assertFalse(
            self.transfer_request_submitter.can_submit(fileinfo))

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_dry_run(self):
        self.cli_params['dryrun'] = True
        self.transfer_request_submitter = UploadRequestSubmitter(
            self.transfer_manager, self.result_queue, self.cli_params)
        fileinfo = FileInfo(
            src=self.filename, src_type='local', operation_name='upload',
            dest=self.bucket + '/' + self.key, dest_type='s3')
        self.transfer_request_submitter.submit(fileinfo)

        result = self.result_queue.get()
        self.assertIsInstance(result, DryRunResult)
        self.assertEqual(result.transfer_type, 'upload')
        self.assertTrue(result.src.endswith(self.filename))
        self.assertEqual(result.dest, 's3://' + self.bucket + '/' + self.key)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_submit_with_expected_size_provided(self):
        provided_size = 100
        self.cli_params['expected_size'] = provided_size
        fileinfo = FileInfo(
            src=self.filename, dest=self.bucket+'/'+self.key)
        self.transfer_request_submitter.submit(fileinfo)
        upload_call_kwargs = self.transfer_manager.upload.call_args[1]

        ref_subscribers = [
            ProvideSizeSubscriber,
            UploadStreamResultSubscriber
        ]
        actual_subscribers = upload_call_kwargs['subscribers']
        self.assertEqual(len(ref_subscribers), len(actual_subscribers))
        for i, actual_subscriber in enumerate(actual_subscribers):
            self.assertIsInstance(actual_subscriber, ref_subscribers[i])
        # The ProvideSizeSubscriber should be providing the correct size
        self.assertEqual(actual_subscribers[0].size, provided_size)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_loc_delete(self):
        """
        Test delete local file tasks.  The local files are the same
        as those generated by filegenerator_test.py.
        """
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for filename in files:
            self.assertTrue(os.path.exists(filename))
            tasks.append(FileInfo(
                src=filename, src_type='local',
                dest_type='s3', operation_name='delete', size=0,
                client=self.client))
        ref_calls = []
        self.assert_operations_for_s3_handler(self.s3_handler, tasks,
                                              ref_calls)
        for filename in files:
            self.assertFalse(os.path.exists(filename))

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_upload_fail(self):
        """
        One of the uploads in this test will fail because the second
        s3 destination's bucket does not exist.
        """
        fail_s3_files = [self.bucket + '/text1.txt',
                         self.bucket[:-1] + '/another_directory/text2.txt']
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(FileInfo(
                src=self.loc_files[i],
                dest=fail_s3_files[i],
                compare_key=None,
                src_type='local',
                dest_type='s3',
                operation_name='upload', size=0,
                last_update=None,
                client=self.client))
        # Since there is only one parsed response, the process will fail
        # because it is expecting one more response.
        self.parsed_responses = [
            {'ETag': '"120ea8a25e5d487bf68b5f7096440019"'},
        ]
        stdout, stderr, rc = self.run_s3_handler(self.s3_handler, tasks)
        self.assertEqual(rc.num_tasks_failed, 1)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_enqueue_downloads(self):
        fileinfos = []
        num_transfers = 5
        for _ in range(num_transfers):
            fileinfos.append(
                FileInfo(src='bucket/key', dest='filename',
                         compare_key='key',
                         operation_name='download'))

        self.s3_transfer_handler.call(fileinfos)
        self.assertEqual(
            self.transfer_manager.download.call_count, num_transfers)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_enqueue_copies(self):
        fileinfos = []
        num_transfers = 5
        for _ in range(num_transfers):
            fileinfos.append(
                FileInfo(src='sourcebucket/sourcekey', dest='bucket/key',
                         compare_key='key',
                         operation_name='copy'))

        self.s3_transfer_handler.call(fileinfos)
        self.assertEqual(
            self.transfer_manager.copy.call_count, num_transfers)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_move(self):
        # Create file info objects to perform move.
        tasks = []
        for i in range(len(self.s3_files)):
            tasks.append(FileInfo(
                src=self.s3_files[i], src_type='s3',
                dest=self.s3_files2[i], dest_type='s3',
                operation_name='move', size=0,
                client=self.client, source_client=self.source_client))
        ref_calls = [
            ('CopyObject',
             {'Bucket': self.bucket2, 'Key': 'text1.txt',
              'CopySource': self.bucket + '/text1.txt', 'ACL': 'private',
              'ContentType': 'text/plain'}),
            ('DeleteObject', {'Bucket': self.bucket, 'Key': 'text1.txt'}),
            ('CopyObject',
             {'Bucket': self.bucket2, 'Key': 'another_directory/text2.txt',
              'CopySource': self.bucket + '/another_directory/text2.txt',
              'ACL': 'private', 'ContentType': 'text/plain'}),
            ('DeleteObject',
             {'Bucket': self.bucket, 'Key': 'another_directory/text2.txt'}),
        ]
        self.assert_operations_for_s3_handler(self.s3_handler, tasks,
                                              ref_calls)

aws/aws-cli · tests/unit/customizations/s3/test_s3handler.py
def test_enqueue_local_deletes(self):
        fileinfos = []
        num_transfers = 5
        for _ in range(num_transfers):
            fileinfos.append(
                FileInfo(src='myfile', dest=None, operation_name='delete',
                         src_type='local'))

        self.s3_transfer_handler.call(fileinfos)
        # The number of processed results will be equal to:
        # number_of_local_deletes * 2 + 1
        # The 2 represents the QueuedResult and SuccessResult/FailureResult
        # for each transfer
        # The 1 represents the TotalFinalSubmissionResult
        self.assertEqual(len(self.processed_results), 11)

        # Make sure that the results are as expected by checking just one
        # of them
        first_submitted_result = self.processed_results[0]
        self.assertEqual(first_submitted_result.transfer_type, 'delete')
        self.assertTrue(first_submitted_result.src.endswith('myfile'))

aws/aws-cli · awscli/customizations/s3/subcommands.py
        fgen_request_parameters = \
            self._get_file_generator_request_parameters_skeleton()
        self._map_request_payer_params(fgen_request_parameters)
        self._map_sse_c_params(fgen_request_parameters, paths_type)
        fgen_kwargs['request_parameters'] = fgen_request_parameters

        rgen_request_parameters = \
            self._get_file_generator_request_parameters_skeleton()
        self._map_request_payer_params(rgen_request_parameters)
        rgen_kwargs['request_parameters'] = rgen_request_parameters

        file_generator = FileGenerator(**fgen_kwargs)
        rev_generator = FileGenerator(**rgen_kwargs)
        stream_dest_path, stream_compare_key = find_dest_path_comp_key(files)
        stream_file_info = [FileInfo(src=files['src']['path'],
                                     dest=stream_dest_path,
                                     compare_key=stream_compare_key,
                                     src_type=files['src']['type'],
                                     dest_type=files['dest']['type'],
                                     operation_name=operation_name,
                                     client=self._client,
                                     is_stream=True)]
        file_info_builder = FileInfoBuilder(
            self._client, self._source_client, self.parameters)

        s3_transfer_handler = S3TransferHandlerFactory(
            self.parameters, self._runtime_config)(
                self._client, result_queue)

        sync_strategies = self.choose_sync_strategies()
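
The tests earlier on this page drive a handler like this directly with handler.call(fileinfos). As a condensed, hypothetical sketch of that flow, where client, params, runtime_config, and result_queue stand in for the objects the snippet above assembles:

# Hypothetical wiring; in aws-cli these objects are built by the
# command architecture shown in the snippet above.
handler = S3TransferHandlerFactory(params, runtime_config)(
    client, result_queue)
fileinfos = [FileInfo(src='bucket/key', dest='filename',
                      compare_key='key', operation_name='download')]
handler.call(fileinfos)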