MySQL export to CSV file as UTF-8 via Python script

Submitted by 我只是一个虾纸丫 on 2019-11-29 22:53:11

Question


I'm able to export a MySQL table to a CSV file via the Python csv module, but the UTF-8 characters are lost (for example, ???? instead of ąöę).

The table data is stored as UTF-8 (phpMyAdmin shows the data correctly).

I found some information saying that in Python all data should be decoded to Unicode and then encoded back to UTF-8 when writing the CSV, for example via a UnicodeWriter (because the native csv module doesn't handle Unicode correctly).

I tried a lot of things, but with no success.

Question: Is there any example Python script that exports a MySQL database stored as UTF-8 to a UTF-8 CSV file?

I use Ubuntu 14.04, and since there is a problem with mysql.connector there, I use MySQLdb with Gord Thompson's code:

# -*- coding: utf-8 -*-
import csv
import MySQLdb
from UnicodeSupportForCsv import UnicodeWriter
import sys
reload(sys)  
sys.setdefaultencoding('utf8')
#sys.setdefaultencoding('Cp1252')

conn = MySQLdb.Connection(db='sampledb', host='localhost',           
user='sampleuser', passwd='samplepass')

crsr = conn.cursor()
crsr.execute("SELECT * FROM rfid")
with open(r'test.csv', 'wb') as csvfile:
    uw = UnicodeWriter(
        csvfile, delimiter=',',
        quotechar='"', quoting=csv.QUOTE_MINIMAL)
    for row in crsr.fetchall():
        uw.writerow([unicode(col) for col in row])

The error still exists: UnicodeDecodeError: 'utf8' codec can't decode byte 0xf3 in position 2: invalid continuation byte
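Note: byte 0xf3 is the Latin-1 encoding of ó, so the error suggests the connection is handing back Latin-1 bytes rather than UTF-8. A minimal sketch of the failure (the byte values are just an example):

# 'ó' as a single Latin-1 byte (0xf3) is not a valid UTF-8 sequence
raw = 'ab\xf3cd'
print raw.decode('latin-1')   # u'ab\xf3cd' -- decodes fine as Latin-1
raw.decode('utf-8')           # raises UnicodeDecodeError: invalid continuation byte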


Answer 1:


MySQL is good at converting character sets, but you need to tell it to set up the connection with the correct character set.

By default it returns the data the way it was put into the database. Add the required charset to the connection:

conn = MySQLdb.Connection(db='sampledb', host='localhost',
                          user='sampleuser', passwd='samplepass',
                          charset='utf8')  # MySQL's name for UTF-8 is 'utf8', not 'utf-8'
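A quick way to confirm the charset took effect is to check what the cursor now returns (a sketch, using the table name from the question):

crsr = conn.cursor()
crsr.execute("SELECT * FROM rfid")
row = crsr.fetchone()
print type(row[0])   # with charset='utf8', use_unicode defaults to True in MySQLdb,
                     # so text columns come back as <type 'unicode'>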

Is this helpful?




Answer 2:


This works for me with Python 2.7.5 and MySQL Connector/Python 2.0.4:

# -*- coding: utf-8 -*-
import csv
import mysql.connector
from UnicodeSupportForCsv import UnicodeWriter

conn = mysql.connector.connect(
    host='localhost', port=3307,
    user='root', password='whatever',
    database='mydb')
crsr = conn.cursor()
crsr.execute("SELECT * FROM vocabulary")
with open(r'C:\Users\Gord\Desktop\test.csv', 'wb') as csvfile:
    uw = UnicodeWriter(
        csvfile, delimiter=',',
        quotechar='"', quoting=csv.QUOTE_MINIMAL)
    for row in crsr.fetchall():
        uw.writerow([unicode(col) for col in row])

The UnicodeWriter class is taken directly from the last example on the documentation page for the csv module, which I stored in a file named "UnicodeSupportForCsv.py":

import csv, codecs, cStringIO

class UTF8Recoder:
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8
    """
    def __init__(self, f, encoding):
        self.reader = codecs.getreader(encoding)(f)

    def __iter__(self):
        return self

    def next(self):
        return self.reader.next().encode("utf-8")

class UnicodeReader:
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)

    def next(self):
        row = self.reader.next()
        return [unicode(s, "utf-8") for s in row]

    def __iter__(self):
        return self

class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()

    def writerow(self, row):
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)

    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
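
For completeness, a small standalone usage sketch of UnicodeWriter (the file name and rows are arbitrary):

# -*- coding: utf-8 -*-
import csv
from UnicodeSupportForCsv import UnicodeWriter

with open('sample.csv', 'wb') as f:
    uw = UnicodeWriter(f, quoting=csv.QUOTE_MINIMAL)
    uw.writerow([u'id', u'word'])                    # rows must contain unicode strings
    uw.writerows([[u'1', u'ąöę'], [u'2', u'café']])  # non-ASCII ends up UTF-8 encoded in the file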



Answer 3:


Finally it works! Thanks to Gord Thompson and Prikkeldraad. Thanks, guys!

# -*- coding: utf-8 -*-
import csv
import MySQLdb
from UnicodeSupportForCsv import UnicodeWriter
import sys
reload(sys)  
sys.setdefaultencoding('utf8')
#sys.setdefaultencoding('Cp1252')

conn = MySQLdb.Connection(db='testdb', host='localhost', user='testuser',
                          passwd='testpasswd', use_unicode=0, charset='utf8')

crsr = conn.cursor()
crsr.execute("SELECT * FROM rfid")

with open(r'test.csv', 'wb') as csvfile:
    uw = UnicodeWriter(
        csvfile, delimiter=',',quotechar='"', quoting=csv.QUOTE_MINIMAL)

    for row in crsr.fetchall():
        uw.writerow([unicode(col) for col in row])



Answer 4:


Try this one, it makes things easy for you:

https://github.com/jdunck/python-unicodecsv

unicodecsv is a drop-in replacement for Python 2.7's csv module that supports unicode strings without any hassle. Supported versions are Python 2.6, 2.7, 3.3, 3.4, 3.5, and PyPy 2.4.0.

>>> import unicodecsv as csv
>>> from io import BytesIO
>>> f = BytesIO()
>>> w = csv.writer(f, encoding='utf-8')
>>> _ = w.writerow((u'é', u'ñ'))
>>> _ = f.seek(0)
>>> r = csv.reader(f, encoding='utf-8')
>>> next(r) == [u'é', u'ñ']
True
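
Applied to the question, the export could then look like this (a sketch that reuses the database/table placeholders from the question, with MySQLdb as the driver):

import MySQLdb
import unicodecsv

conn = MySQLdb.Connection(db='testdb', host='localhost',
                          user='testuser', passwd='testpasswd',
                          charset='utf8')            # cursor returns unicode objects
crsr = conn.cursor()
crsr.execute("SELECT * FROM rfid")

with open('test.csv', 'wb') as f:
    w = unicodecsv.writer(f, encoding='utf-8')
    for row in crsr.fetchall():
        w.writerow([unicode(col) for col in row])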


Source: https://stackoverflow.com/questions/34599900/mysql-export-to-csv-file-as-utf-8-via-python-script
