How to use the unicodecsv.QUOTE_ALL constant in unicodecsv

To help you get started, we’ve selected a few unicodecsv examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github XLSForm / pyxform / pyxform / utils.py View on Github external
def sheet_to_csv(workbook_path, csv_path, sheet_name):
    """Export one sheet of an .xls workbook to a fully-quoted CSV file.

    Returns False when the sheet name is unknown or the sheet has fewer
    than two rows; otherwise writes the CSV to *csv_path*.
    NOTE(review): this excerpt falls off the end returning None on success;
    upstream versions return True — the snippet may be truncated.
    NOTE(review): 'csv' here is presumably the unicodecsv module (the file
    is opened in binary mode) — confirm against the module-level imports.
    """
    from pyxform.xls2json_backends import xls_value_to_unicode

    wb = xlrd.open_workbook(workbook_path)
    try:
        sheet = wb.sheet_by_name(sheet_name)
    except xlrd.biffh.XLRDError:
        # unknown sheet name
        return False
    if not sheet or sheet.nrows < 2:
        # nothing (or only a header row) to export
        return False
    with open(csv_path, "wb") as f:
        writer = csv.writer(f, quoting=csv.QUOTE_ALL)
        # keep only columns whose header cell (row 0) is non-blank
        mask = [v and len(v.strip()) > 0 for v in sheet.row_values(0)]
        for row_idx in range(sheet.nrows):
            csv_data = []
            try:
                for v, m in zip(sheet.row(row_idx), mask):
                    if m:
                        value = v.value
                        value_type = v.ctype
                        data = xls_value_to_unicode(value, value_type, wb.datemode)
                        # clean the values of leading and trailing whitespaces
                        data = data.strip()
                        csv_data.append(data)
            except TypeError:
                # NOTE(review): unclear which call raises TypeError here;
                # the whole row is skipped when it happens.
                continue
            writer.writerow(csv_data)
github digitalgreenorg / dg / activities / management / commands / bluefrog_pest.py View on Github external
def handle(self, *args, **options):
		"""Django management-command entry point (Python 2): fetch pest data
		from the BlueFrog AP MIS API and process it, presumably into tag
		records — TODO confirm against the truncated tail.

		NOTE(review): this excerpt is cut off mid-statement at the final
		line-continuation.
		"""
		#read xml from url
		req = requests.get('http://45.127.101.204/DG_API/AP_MIS.svc/GetPestDetails', auth=(settings.BLUEFROG_API_USERNAME, settings.BLUEFROG_API_PASSWORD))
		# keep the raw response body on disk for later inspection
		xml_file = open("ap/pest_tag.xml", 'w')
		xml_file.write(req.content)
		xml_file.close()
		# partner=Partner.objects.get(id=50)
		# report/error rows are written here with every field quoted
		csv_file = open('ap/pest_tag.csv', 'wb')
		wtr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
		# tree = ET.parse('ap/crop.xml')
		# root = tree.getroot()
		try:
			# NOTE(review): req.json() already returns parsed JSON; passing its
			# result to json.loads only works if the API double-encodes the body.
			data = json.loads(req.json(), strict=False)
		except Exception as e:
			# NOTE(review): on failure 'data' stays unbound, so the loop below
			# raises NameError after printing the error.
			print e
		state = State.objects.get(id=6)
		user_obj = User.objects.get(username="apvideo")
		for data_iterable in data:
			tag_code = data_iterable.get('pest_id')
			tag_name = data_iterable.get('pest_name')
			tag_regional_name = data_iterable.get('pest_name_telugu')
			

			try:
				tag_obj, created = \
github openstack / cliff / cliff / formatters / commaseparated.py View on Github external
from .base import ListFormatter
from cliff import columns

import six

if sys.version_info[0] == 3:
    import csv
else:
    import unicodecsv as csv


class CSVLister(ListFormatter):
    """cliff formatter that writes list output as CSV.

    'csv' is the stdlib module on Python 3 and unicodecsv on Python 2
    (see the conditional import at the top of the file).
    """

    # Maps the --quote option's string values to the csv quoting constants.
    QUOTE_MODES = {
        'all': csv.QUOTE_ALL,
        'minimal': csv.QUOTE_MINIMAL,
        'nonnumeric': csv.QUOTE_NONNUMERIC,
        'none': csv.QUOTE_NONE,
    }

    def add_argument_group(self, parser):
        """Register the CSV formatter's '--quote' option on *parser*."""
        group = parser.add_argument_group('CSV Formatter')
        group.add_argument(
            '--quote',
            choices=sorted(self.QUOTE_MODES.keys()),
            dest='quote_mode',
            default='nonnumeric',
            help='when to include quotes, defaults to nonnumeric',
        )

    # NOTE(review): method body truncated in this excerpt.
    def emit_list(self, column_names, data, stdout, parsed_args):
github digitalgreenorg / dg / activities / management / commands / bluefrog_person.py View on Github external
def handle(self, *args, **options):
		"""Django management-command entry point (Python 2): fetch farmer
		(person) records per district from the BlueFrog AP MIS API.

		NOTE(review): this excerpt is truncated after 'father_name'.
		"""
		#read xml from url
		district_code_list = ['0101', '0102', '0103', '0104', '0105', '0106', '0107', '0108', '0109', '0110', '0111', '0112', '0113']

		for item in district_code_list:
			req = requests.get('http://45.127.101.204/DG_API/AP_MIS.svc/GetFarmerDetails/?District_ID=%s' % item, auth=(settings.BLUEFROG_API_USERNAME, settings.BLUEFROG_API_PASSWORD))
			# keep the raw per-district response on disk for later inspection
			xml_file = open("ap/person_%s.xml" % item, 'w')
			xml_file.write(req.content)
			xml_file.close()
			partner=Partner.objects.get(id=72)
			# per-district error rows go to this CSV, every field quoted
			csv_file = open('ap/person_error_%s.csv' %item, 'wb')
			wtr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
			# tree = ET.parse('ap/person.xml')
			# root = tree.getroot()
			# NOTE(review): req.json() already returns parsed JSON; passing it
			# to json.loads only works if the API double-encodes the body.
			data = json.loads(req.json())
			state = State.objects.get(id=6)
			user_obj = User.objects.get(username="apvideo")
			district_data_list = []
			for data_iterable in data:
				person_code = data_iterable.get('ID')
				person_name = data_iterable.get('Name')
				gender = data_iterable.get('Gender')
				if gender == "Male":
					gender = "M"
				else:
					# NOTE(review): a missing/None Gender also maps to "F" here.
					gender = "F"
				age = data_iterable.get('Age', None)
				father_name = data_iterable.get('Father/Husband Name')
github digitalgreenorg / dg / activities / management / commands / bluefrog_screening.py View on Github external
def handle(self, *args, **options):
		"""Django management-command entry point (Python 2): fetch pico
		dissemination (screening) records from the BlueFrog AP MIS API.

		NOTE(review): this excerpt is truncated after 'block_code'.
		"""
		#read xml from url
		req = requests.get('http://45.127.101.204/DG_API/AP_MIS.svc/GetPicoDisseminationDetails', auth=(settings.BLUEFROG_API_USERNAME, settings.BLUEFROG_API_PASSWORD))
		# keep the raw response body on disk for later inspection
		xml_file = open("ap/screening.xml", 'w')
		xml_file.write(req.content)
		xml_file.close()
		partner=Partner.objects.get(id=72)
		# report rows go to this CSV, every field quoted
		csv_file = open('ap/screening.csv', 'wb')
		wtr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
		# tree = ET.parse('ap/screening.xml')
		# root = tree.getroot()
		try:
			data = json.loads(req.json(), strict=False)
		except Exception as e:
			# NOTE(review): error silently swallowed; 'data' stays unbound, so
			# the loop below raises NameError when parsing fails.
			pass
		# state = State.objects.get(id=6)
		user_obj = User.objects.get(username="apvideo")
		district_data_list = []

		for data_iterable in data:
			screening_code = data_iterable.get('ID')
			start_date = data_iterable.get('Date of Screening')
			start_time = data_iterable.get('Date & Time of Data Entry')
			district_code = data_iterable.get('District ID')
			# NOTE(review): same key as start_date — probable copy/paste bug;
			# expected something like 'Block ID'. Confirm before fixing.
			block_code = data_iterable.get('Date of Screening')
github scrapehero / yellowpages-scraper / yellow_pages.py View on Github external
# NOTE(review): this excerpt lost its enclosing guard (presumably
# "if __name__ == '__main__':") — the first line below is dedented
# relative to the rest, so the snippet is not valid as-is.
argparser = argparse.ArgumentParser()
    argparser.add_argument('keyword', help='Search Keyword')
    argparser.add_argument('place', help='Place Name')

    args = argparser.parse_args()
    keyword = args.keyword
    place = args.place

    # scrape listings for the keyword/place pair (parse_listing defined elsewhere)
    scraped_data = parse_listing(keyword, place)

    if scraped_data:
        print("Writing scraped data to %s-%s-yellowpages-scraped-data.csv" % (keyword, place))
        with open('%s-%s-yellowpages-scraped-data.csv' % (keyword, place), 'wb') as csvfile:
            fieldnames = ['rank', 'business_name', 'telephone', 'business_page', 'category', 'website', 'rating',
                          'street', 'locality', 'region', 'zipcode', 'listing_url']
            # rows are dicts keyed by fieldnames; every field is quoted
            writer = csv.DictWriter(csvfile, fieldnames=fieldnames, quoting=csv.QUOTE_ALL)
            writer.writeheader()
            for data in scraped_data:
                writer.writerow(data)
github unicefuganda / eums / eums / services / csv_export_service.py View on Github external
def generate(cls, data, category, filename):
        """Write *data* (an iterable of rows) to EXPORTS_DIR/<category>/<filename>.

        The first line written is the 'sep=,' hint so spreadsheet apps parse
        the file with ',' as the separator; the rows follow, fully quoted.

        Fix over the original: the export file was opened but never closed,
        leaking the file handle and risking unflushed data. A ``with`` block
        now guarantees it is closed even if writing raises.
        NOTE(review): Python 2 style code — writes a str to a binary-mode file.
        """
        file_location = settings_export.EXPORTS_DIR + category + '/' + filename
        with open(file_location, 'wb') as export_file:
            export_file.write('sep=,\n')
            writer = unicodecsv.writer(export_file, quoting=unicodecsv.QUOTE_ALL)
            writer.writerows(data)
github digitalgreenorg / dg / loop / helpline_view.py View on Github external
def write_log(log_file, module, log):
    """Append one fully-quoted CSV row [timestamp, module, log] to *log_file*.

    The timestamp is the current wall-clock time in the Asia/Kolkata
    timezone; the file is opened in binary append mode ('ab').
    """
    now_ist = datetime.datetime.now(timezone('Asia/Kolkata'))
    with open(log_file, 'ab') as out:
        csv.writer(out, quoting=csv.QUOTE_ALL).writerow([now_ist, module, log])
github woosmap / woosmap-samples / python-samples / csv_to_woosmap / csv_to_woosmap.py View on Github external
import time
import requests
from hashlib import sha1

# Name of the input CSV file to import.
YOUR_INPUT_CSV_FILE = 'foodmarkets.csv'
# NOTE(review): secret hard-coded in source — move to an env var or config.
WOOSMAP_PRIVATE_API_KEY = '23713926-1af5-4321-ba54-032966f6e95d'
# Number of records handled per batch (presumably per API call) — TODO confirm.
BATCH_SIZE = 5


class MyCSVDialect(csv.Dialect):
    """CSV dialect for the input file: comma-separated values, every field
    wrapped in double quotes (embedded quotes doubled), Unix line endings."""

    quoting = csv.QUOTE_ALL        # quote every field
    delimiter = ','                # field separator
    quotechar = '"'                # quoting character
    doublequote = True             # embedded quotes are doubled, not escaped
    skipinitialspace = False       # keep whitespace right after delimiters
    lineterminator = '\n'          # '\n' line endings


class Woosmap:
    """A wrapper around the Woosmap Data API."""

    # API host; all requests below target https://<host>/stores/
    WOOSMAP_API_HOSTNAME = 'api.woosmap.com'

    def __init__(self):
        # one shared session so HTTP connections are reused across calls
        self.session = requests.Session()

    def delete(self):
        # Issue a DELETE on the /stores/ collection for this private key.
        self.session.delete('https://{hostname}/stores/'.format(hostname=self.WOOSMAP_API_HOSTNAME),
                            params={'private_key': WOOSMAP_PRIVATE_API_KEY})

    def post(self, payload):
        # Upload *payload* to the /stores/ endpoint.
        # NOTE(review): the call is truncated in this excerpt — the remaining
        # arguments (and anything after) are cut off.
        return self.session.post('https://{hostname}/stores/'.format(hostname=self.WOOSMAP_API_HOSTNAME),
github digitalgreenorg / dg / activities / management / commands / jslps_videos_nn.py View on Github external
def handle(self, *args, **options):
		"""Django management-command entry point (Python 2): download the JSLPS
		video-master XML export and process its records.

		NOTE(review): this excerpt is truncated after the SubCategory branch.
		"""
		#saving videos
		file_url = 'http://webservicesri.swalekha.in/Service.asmx/GetExportVedioMasterData'+'?pUsername=%s&pPassword=%s' % (settings.JSLPS_USERNAME, settings.JSLPS_PASSWORD)
		url = urllib2.urlopen(file_url)
		contents = url.read()
		# persist the raw XML so it can be re-parsed below (and inspected later)
		xml_file = open("jslps_data_integration_files/video.xml", 'w')
		xml_file.write(contents)
		xml_file.close()

		partner = Partner.objects.get(id = 24)
		# error rows are appended to this CSV with every field quoted
		csv_file = open('jslps_data_integration_files/videos_error.csv', 'wb')
		wtr = csv.writer(csv_file, quoting=csv.QUOTE_ALL)
		tree = ET.parse('jslps_data_integration_files/video.xml')
		root = tree.getroot()
		for c in root.findall('VedioMasterData'):
			vdc = c.find('VideoID').text
			vn = c.find('VideoTitle').text
			vt = int(c.find('VideoType').text)
			if c.find('Category') is not None: 
				cg = int(c.find('Category').text)
			else:
				cg = None
				jslps.other_error_count += 1
				# NOTE(review): 'e' is not defined on this path — this writerow
				# raises NameError whenever Category is missing.
				wtr.writerow(['Can not save video without category',vdc,'title', vn, e])
				continue
			if c.find('SubCategory') is not None: 
				scg = int(c.find('SubCategory').text)
			else: