Initial post

yulai.li 7 years ago
commit
0d5d3bdbfd

+ 6 - 0
.gitignore

@@ -0,0 +1,6 @@
+*.swp
+build/*
+*.pyc
+.DS_Store
+**/.DS_Store
+.idea

+ 201 - 0
LICENSE

@@ -0,0 +1,201 @@
+Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "{}"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright {}
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

+ 36 - 0
README.md

@@ -0,0 +1,36 @@
+# TiDB Django backend
+
+This library is based on Django's MySQL backend, with some logic and supported features changed to fit TiDB.
+
+For now, this package has only been tested with Python 2.7.
+
+# Usage
+
+### Install Package
+
+```
+# git clone http://github.com/blacktear23/tidb_django.git
+# cd tidb_django
+# python setup.py install
+```
+
+### Update settings.py
+
+```python
+DATABASES = {
+    'default': {
+        'ENGINE': 'django_tidb.tidb',
+        'NAME': 'database',
+        'USER': 'username',
+        'PASSWORD': 'password',
+        'HOST': '127.0.0.1',
+        'PORT': 4000,
+    }
+}
+```
+
+# Differences from MySQL
+
+* TiDB does not support foreign keys
+* TiDB requires that an index be dropped before dropping the column it covers (see the sketch below)
+* TiDB does not support the `CASCADE` keyword when dropping an index or a column
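
As an illustration of the last two points, here is a rough sketch (not part of the commit) of the statement order the bundled schema editor's `remove_field()` follows when an indexed column is dropped. It simply fills in the `sql_delete_index` and `sql_delete_column` templates defined in `django_tidb/tidb/schema.py`; the table, index, and column names are hypothetical.

```python
# Sketch only: the drop order the TiDB schema editor uses for an indexed column.
# Table/index/column names below are made up for illustration.
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"

statements = [
    # 1. The covering index must be dropped first (TiDB has no CASCADE).
    sql_delete_index % {"name": "`myapp_book_author_id_idx`", "table": "`myapp_book`"},
    # 2. Only then can the column itself be dropped.
    sql_delete_column % {"table": "`myapp_book`", "column": "`author_id`"},
]
for statement in statements:
    print(statement)
```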

+ 0 - 0
django_tidb/__init__.py


+ 0 - 0
django_tidb/tidb/__init__.py


+ 376 - 0
django_tidb/tidb/base.py

@@ -0,0 +1,376 @@
+"""
+MySQL database backend for Django.
+
+Requires mysqlclient: https://pypi.python.org/pypi/mysqlclient/
+MySQLdb is supported for Python 2 only: http://sourceforge.net/projects/mysql-python
+"""
+from __future__ import unicode_literals
+
+import datetime
+import re
+import sys
+import warnings
+
+from django.conf import settings
+from django.db import utils
+from django.db.backends import utils as backend_utils
+from django.db.backends.base.base import BaseDatabaseWrapper
+from django.utils import six, timezone
+from django.utils.encoding import force_str
+from django.utils.functional import cached_property
+from django.utils.safestring import SafeBytes, SafeText
+
+try:
+    import MySQLdb as Database
+except ImportError as e:
+    from django.core.exceptions import ImproperlyConfigured
+    raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
+
+from MySQLdb.constants import CLIENT, FIELD_TYPE                # isort:skip
+from MySQLdb.converters import Thing2Literal, conversions       # isort:skip
+
+# Some of these import MySQLdb, so import them after checking if it's installed.
+from .client import DatabaseClient                          # isort:skip
+from .creation import DatabaseCreation                      # isort:skip
+from .features import DatabaseFeatures                      # isort:skip
+from .introspection import DatabaseIntrospection            # isort:skip
+from .operations import DatabaseOperations                  # isort:skip
+from .schema import DatabaseSchemaEditor                    # isort:skip
+from .validation import DatabaseValidation                  # isort:skip
+
+# We want version (1, 2, 1, 'final', 2) or later. We can't just use
+# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
+# inadvertently passes the version test.
+version = Database.version_info
+if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and
+        (len(version) < 5 or version[3] != 'final' or version[4] < 2))):
+    from django.core.exceptions import ImproperlyConfigured
+    raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
+
+
+DatabaseError = Database.DatabaseError
+IntegrityError = Database.IntegrityError
+
+# It's impossible to import datetime_or_None directly from MySQLdb.times
+parse_datetime = conversions[FIELD_TYPE.DATETIME]
+
+
+def parse_datetime_with_timezone_support(value):
+    dt = parse_datetime(value)
+    # Confirm that dt is naive before overwriting its tzinfo.
+    if dt is not None and settings.USE_TZ and timezone.is_naive(dt):
+        dt = dt.replace(tzinfo=timezone.utc)
+    return dt
+
+
+def adapt_datetime_with_timezone_support(value, conv):
+    # Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.
+    if settings.USE_TZ:
+        if timezone.is_naive(value):
+            warnings.warn("MySQL received a naive datetime (%s)"
+                          " while time zone support is active." % value,
+                          RuntimeWarning)
+            default_timezone = timezone.get_default_timezone()
+            value = timezone.make_aware(value, default_timezone)
+        value = value.astimezone(timezone.utc).replace(tzinfo=None)
+    return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S.%f"), conv)
+
+# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
+# timedelta in terms of actual behavior as they are signed and include days --
+# and Django expects time, so we still need to override that. We also need to
+# add special handling for SafeText and SafeBytes as MySQLdb's type
+# checking is too tight to catch those (see Django ticket #6052).
+# Finally, MySQLdb always returns naive datetime objects. However, when
+# timezone support is active, Django expects timezone-aware datetime objects.
+django_conversions = conversions.copy()
+django_conversions.update({
+    FIELD_TYPE.TIME: backend_utils.typecast_time,
+    FIELD_TYPE.DECIMAL: backend_utils.typecast_decimal,
+    FIELD_TYPE.NEWDECIMAL: backend_utils.typecast_decimal,
+    FIELD_TYPE.DATETIME: parse_datetime_with_timezone_support,
+    datetime.datetime: adapt_datetime_with_timezone_support,
+})
+
+# This should match the numerical portion of the version numbers (we can treat
+# versions like 5.0.24 and 5.0.24a as the same). Based on the lists of versions
+# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
+# http://dev.mysql.com/doc/refman/5.0/en/news.html.
+server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
+
+
+# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
+# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
+# point is to raise Warnings as exceptions, this can be done with the Python
+# warning module, and this is set up when the connection is created, and the
+# standard backend_utils.CursorDebugWrapper can be used. Also, using sql_mode
+# TRADITIONAL will automatically cause most warnings to be treated as errors.
+
+class CursorWrapper(object):
+    """
+    A thin wrapper around MySQLdb's normal cursor class so that we can catch
+    particular exception instances and reraise them with the right types.
+
+    Implemented as a wrapper, rather than a subclass, so that we aren't stuck
+    to the particular underlying representation returned by Connection.cursor().
+    """
+    codes_for_integrityerror = (1048,)
+
+    def __init__(self, cursor):
+        self.cursor = cursor
+
+    def execute(self, query, args=None):
+        try:
+            # args is None means no string interpolation
+            return self.cursor.execute(query, args)
+        except Database.OperationalError as e:
+            # Map some error codes to IntegrityError, since they seem to be
+            # misclassified and Django would prefer the more logical place.
+            if e.args[0] in self.codes_for_integrityerror:
+                six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
+            raise
+
+    def executemany(self, query, args):
+        try:
+            return self.cursor.executemany(query, args)
+        except Database.OperationalError as e:
+            # Map some error codes to IntegrityError, since they seem to be
+            # misclassified and Django would prefer the more logical place.
+            if e.args[0] in self.codes_for_integrityerror:
+                six.reraise(utils.IntegrityError, utils.IntegrityError(*tuple(e.args)), sys.exc_info()[2])
+            raise
+
+    def __getattr__(self, attr):
+        if attr in self.__dict__:
+            return self.__dict__[attr]
+        else:
+            return getattr(self.cursor, attr)
+
+    def __iter__(self):
+        return iter(self.cursor)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        # Ticket #17671 - Close instead of passing thru to avoid backend
+        # specific behavior.
+        self.close()
+
+
+class DatabaseWrapper(BaseDatabaseWrapper):
+    vendor = 'mysql'
+    # This dictionary maps Field objects to their associated MySQL column
+    # types, as strings. Column-type strings can contain format strings; they'll
+    # be interpolated against the values of Field.__dict__ before being output.
+    # If a column type is set to None, it won't be included in the output.
+    _data_types = {
+        'AutoField': 'integer AUTO_INCREMENT',
+        'BinaryField': 'longblob',
+        'BooleanField': 'bool',
+        'CharField': 'varchar(%(max_length)s)',
+        'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
+        'DateField': 'date',
+        'DateTimeField': 'datetime',
+        'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
+        'DurationField': 'bigint',
+        'FileField': 'varchar(%(max_length)s)',
+        'FilePathField': 'varchar(%(max_length)s)',
+        'FloatField': 'double precision',
+        'IntegerField': 'integer',
+        'BigIntegerField': 'bigint',
+        'IPAddressField': 'char(15)',
+        'GenericIPAddressField': 'char(39)',
+        'NullBooleanField': 'bool',
+        'OneToOneField': 'integer',
+        'PositiveIntegerField': 'integer UNSIGNED',
+        'PositiveSmallIntegerField': 'smallint UNSIGNED',
+        'SlugField': 'varchar(%(max_length)s)',
+        'SmallIntegerField': 'smallint',
+        'TextField': 'longtext',
+        'TimeField': 'time',
+        'UUIDField': 'char(32)',
+    }
+
+    @cached_property
+    def data_types(self):
+        if self.features.supports_microsecond_precision:
+            return dict(self._data_types, DateTimeField='datetime(6)', TimeField='time(6)')
+        else:
+            return self._data_types
+
+    operators = {
+        'exact': '= %s',
+        'iexact': 'LIKE %s',
+        'contains': 'LIKE BINARY %s',
+        'icontains': 'LIKE %s',
+        'regex': 'REGEXP BINARY %s',
+        'iregex': 'REGEXP %s',
+        'gt': '> %s',
+        'gte': '>= %s',
+        'lt': '< %s',
+        'lte': '<= %s',
+        'startswith': 'LIKE BINARY %s',
+        'endswith': 'LIKE BINARY %s',
+        'istartswith': 'LIKE %s',
+        'iendswith': 'LIKE %s',
+    }
+
+    # The patterns below are used to generate SQL pattern lookup clauses when
+    # the right-hand side of the lookup isn't a raw string (it might be an expression
+    # or the result of a bilateral transformation).
+    # In those cases, special characters for LIKE operators (e.g. \, *, _) should be
+    # escaped on database side.
+    #
+    # Note: we use str.format() here for readability as '%' is used as a wildcard for
+    # the LIKE operator.
+    pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')"
+    pattern_ops = {
+        'contains': "LIKE BINARY CONCAT('%%', {}, '%%')",
+        'icontains': "LIKE CONCAT('%%', {}, '%%')",
+        'startswith': "LIKE BINARY CONCAT({}, '%%')",
+        'istartswith': "LIKE CONCAT({}, '%%')",
+        'endswith': "LIKE BINARY CONCAT('%%', {})",
+        'iendswith': "LIKE CONCAT('%%', {})",
+    }
+
+    Database = Database
+    SchemaEditorClass = DatabaseSchemaEditor
+
+    def __init__(self, *args, **kwargs):
+        super(DatabaseWrapper, self).__init__(*args, **kwargs)
+
+        self.features = DatabaseFeatures(self)
+        self.ops = DatabaseOperations(self)
+        self.client = DatabaseClient(self)
+        self.creation = DatabaseCreation(self)
+        self.introspection = DatabaseIntrospection(self)
+        self.validation = DatabaseValidation(self)
+
+    def get_connection_params(self):
+        kwargs = {
+            'conv': django_conversions,
+            'charset': 'utf8',
+        }
+        if six.PY2:
+            kwargs['use_unicode'] = True
+        settings_dict = self.settings_dict
+        if settings_dict['USER']:
+            kwargs['user'] = settings_dict['USER']
+        if settings_dict['NAME']:
+            kwargs['db'] = settings_dict['NAME']
+        if settings_dict['PASSWORD']:
+            kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
+        if settings_dict['HOST'].startswith('/'):
+            kwargs['unix_socket'] = settings_dict['HOST']
+        elif settings_dict['HOST']:
+            kwargs['host'] = settings_dict['HOST']
+        if settings_dict['PORT']:
+            kwargs['port'] = int(settings_dict['PORT'])
+        # We need the number of potentially affected rows after an
+        # "UPDATE", not the number of changed rows.
+        kwargs['client_flag'] = CLIENT.FOUND_ROWS
+        kwargs.update(settings_dict['OPTIONS'])
+        return kwargs
+
+    def get_new_connection(self, conn_params):
+        conn = Database.connect(**conn_params)
+        conn.encoders[SafeText] = conn.encoders[six.text_type]
+        conn.encoders[SafeBytes] = conn.encoders[bytes]
+        return conn
+
+    def init_connection_state(self):
+        with self.cursor() as cursor:
+            # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column
+            # on a recently-inserted row will return when the field is tested for
+            # NULL.  Disabling this value brings this aspect of MySQL in line with
+            # SQL standards.
+            cursor.execute('SET SQL_AUTO_IS_NULL = 0')
+
+    def create_cursor(self):
+        cursor = self.connection.cursor()
+        return CursorWrapper(cursor)
+
+    def _rollback(self):
+        try:
+            BaseDatabaseWrapper._rollback(self)
+        except Database.NotSupportedError:
+            pass
+
+    def _set_autocommit(self, autocommit):
+        with self.wrap_database_errors:
+            self.connection.autocommit(autocommit)
+
+    def disable_constraint_checking(self):
+        """
+        Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
+        to indicate constraint checks need to be re-enabled.
+        """
+        self.cursor().execute('SET foreign_key_checks=0')
+        return True
+
+    def enable_constraint_checking(self):
+        """
+        Re-enable foreign key checks after they have been disabled.
+        """
+        # Override needs_rollback in case constraint_checks_disabled is
+        # nested inside transaction.atomic.
+        self.needs_rollback, needs_rollback = False, self.needs_rollback
+        try:
+            self.cursor().execute('SET foreign_key_checks=1')
+        finally:
+            self.needs_rollback = needs_rollback
+
+    def check_constraints(self, table_names=None):
+        """
+        Checks each table name in `table_names` for rows with invalid foreign
+        key references. This method is intended to be used in conjunction with
+        `disable_constraint_checking()` and `enable_constraint_checking()`, to
+        determine if rows with invalid references were entered while constraint
+        checks were off.
+
+        Raises an IntegrityError on the first invalid foreign key reference
+        encountered (if any) and provides detailed information about the
+        invalid reference in the error message.
+
+        Backends can override this method if they can more directly apply
+        constraint checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE")
+        """
+        cursor = self.cursor()
+        if table_names is None:
+            table_names = self.introspection.table_names(cursor)
+        for table_name in table_names:
+            primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
+            if not primary_key_column_name:
+                continue
+            key_columns = self.introspection.get_key_columns(cursor, table_name)
+            for column_name, referenced_table_name, referenced_column_name in key_columns:
+                cursor.execute("""
+                    SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
+                    LEFT JOIN `%s` as REFERRED
+                    ON (REFERRING.`%s` = REFERRED.`%s`)
+                    WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
+                    % (primary_key_column_name, column_name, table_name, referenced_table_name,
+                    column_name, referenced_column_name, column_name, referenced_column_name))
+                for bad_row in cursor.fetchall():
+                    raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
+                        "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
+                        % (table_name, bad_row[0],
+                        table_name, column_name, bad_row[1],
+                        referenced_table_name, referenced_column_name))
+
+    def is_usable(self):
+        try:
+            self.connection.ping()
+        except Database.Error:
+            return False
+        else:
+            return True
+
+    @cached_property
+    def mysql_version(self):
+        with self.temporary_connection():
+            server_info = self.connection.get_server_info()
+        match = server_version_re.match(server_info)
+        if not match:
+            raise Exception('Unable to determine MySQL version from version string %r' % server_info)
+        return tuple(int(x) for x in match.groups())

+ 42 - 0
django_tidb/tidb/client.py

@@ -0,0 +1,42 @@
+import subprocess
+
+from django.db.backends.base.client import BaseDatabaseClient
+
+
+class DatabaseClient(BaseDatabaseClient):
+    executable_name = 'mysql'
+
+    @classmethod
+    def settings_to_cmd_args(cls, settings_dict):
+        args = [cls.executable_name]
+        db = settings_dict['OPTIONS'].get('db', settings_dict['NAME'])
+        user = settings_dict['OPTIONS'].get('user', settings_dict['USER'])
+        passwd = settings_dict['OPTIONS'].get('passwd', settings_dict['PASSWORD'])
+        host = settings_dict['OPTIONS'].get('host', settings_dict['HOST'])
+        port = settings_dict['OPTIONS'].get('port', settings_dict['PORT'])
+        cert = settings_dict['OPTIONS'].get('ssl', {}).get('ca')
+        defaults_file = settings_dict['OPTIONS'].get('read_default_file')
+        # Seems to be no good way to set sql_mode with CLI.
+
+        if defaults_file:
+            args += ["--defaults-file=%s" % defaults_file]
+        if user:
+            args += ["--user=%s" % user]
+        if passwd:
+            args += ["--password=%s" % passwd]
+        if host:
+            if '/' in host:
+                args += ["--socket=%s" % host]
+            else:
+                args += ["--host=%s" % host]
+        if port:
+            args += ["--port=%s" % port]
+        if cert:
+            args += ["--ssl-ca=%s" % cert]
+        if db:
+            args += [db]
+        return args
+
+    def runshell(self):
+        args = DatabaseClient.settings_to_cmd_args(self.connection.settings_dict)
+        subprocess.call(args)
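
For context (not part of the commit), this is roughly what `settings_to_cmd_args()` produces for the example settings shown in the README, and what `manage.py dbshell` would then run via `subprocess.call()`; all values are the hypothetical ones from that example.

```python
# Sketch: argument list built from the README's example DATABASES entry
# (OPTIONS empty, HOST is not a socket path, no SSL cert or defaults file).
settings_dict = {
    'NAME': 'database', 'USER': 'username', 'PASSWORD': 'password',
    'HOST': '127.0.0.1', 'PORT': 4000, 'OPTIONS': {},
}
args = ['mysql']
args += ['--user=%s' % settings_dict['USER']]
args += ['--password=%s' % settings_dict['PASSWORD']]
args += ['--host=%s' % settings_dict['HOST']]   # a '/' in HOST would use --socket instead
args += ['--port=%s' % settings_dict['PORT']]
args += [settings_dict['NAME']]
print(' '.join(args))
# mysql --user=username --password=password --host=127.0.0.1 --port=4000 database
```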

+ 25 - 0
django_tidb/tidb/compiler.py

@@ -0,0 +1,25 @@
+from django.db.models.sql import compiler
+
+
+class SQLCompiler(compiler.SQLCompiler):
+    def as_subquery_condition(self, alias, columns, compiler):
+        qn = compiler.quote_name_unless_alias
+        qn2 = self.connection.ops.quote_name
+        sql, params = self.as_sql()
+        return '(%s) IN (%s)' % (', '.join('%s.%s' % (qn(alias), qn2(column)) for column in columns), sql), params
+
+
+class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):
+    pass
+
+
+class SQLDeleteCompiler(compiler.SQLDeleteCompiler, SQLCompiler):
+    pass
+
+
+class SQLUpdateCompiler(compiler.SQLUpdateCompiler, SQLCompiler):
+    pass
+
+
+class SQLAggregateCompiler(compiler.SQLAggregateCompiler, SQLCompiler):
+    pass

+ 43 - 0
django_tidb/tidb/creation.py

@@ -0,0 +1,43 @@
+from django.db.backends.base.creation import BaseDatabaseCreation
+
+
+class DatabaseCreation(BaseDatabaseCreation):
+
+    def sql_table_creation_suffix(self):
+        suffix = []
+        test_settings = self.connection.settings_dict['TEST']
+        if test_settings['CHARSET']:
+            suffix.append('CHARACTER SET %s' % test_settings['CHARSET'])
+        if test_settings['COLLATION']:
+            suffix.append('COLLATE %s' % test_settings['COLLATION'])
+        return ' '.join(suffix)
+
+    def sql_for_inline_foreign_key_references(self, model, field, known_models, style):
+        "All inline references are pending under MySQL"
+        return [], True
+
+    def sql_destroy_indexes_for_fields(self, model, fields, style):
+        if len(fields) == 1 and fields[0].db_tablespace:
+            tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
+        elif model._meta.db_tablespace:
+            tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
+        else:
+            tablespace_sql = ""
+        if tablespace_sql:
+            tablespace_sql = " " + tablespace_sql
+
+        field_names = []
+        qn = self.connection.ops.quote_name
+        for f in fields:
+            field_names.append(style.SQL_FIELD(qn(f.column)))
+
+        index_name = "%s_%s" % (model._meta.db_table, self._digest([f.name for f in fields]))
+
+        from django.db.backends.utils import truncate_name
+
+        return [
+            style.SQL_KEYWORD("DROP INDEX") + " " +
+            style.SQL_TABLE(qn(truncate_name(index_name, self.connection.ops.max_name_length()))) + " " +
+            style.SQL_KEYWORD("ON") + " " +
+            style.SQL_TABLE(qn(model._meta.db_table)) + ";",
+        ]

+ 71 - 0
django_tidb/tidb/features.py

@@ -0,0 +1,71 @@
+from django.db.backends.base.features import BaseDatabaseFeatures
+from django.utils.functional import cached_property
+
+from .base import Database
+
+try:
+    import pytz
+except ImportError:
+    pytz = None
+
+
+class DatabaseFeatures(BaseDatabaseFeatures):
+    empty_fetchmany_value = ()
+    update_can_self_select = False
+    allows_group_by_pk = True
+    related_fields_match_type = True
+    allow_sliced_subqueries = False
+    has_bulk_insert = True
+    has_select_for_update = True
+    has_select_for_update_nowait = False
+    supports_forward_references = False
+    supports_regex_backreferencing = False
+    supports_date_lookup_using_string = False
+    supports_foreign_keys = False
+    can_introspect_autofield = True
+    can_introspect_binary_field = False
+    can_introspect_small_integer_field = True
+    supports_timezones = False
+    requires_explicit_null_ordering_when_grouping = True
+    allows_auto_pk_0 = False
+    uses_savepoints = True
+    can_release_savepoints = True
+    atomic_transactions = False
+    supports_column_check_constraints = False
+
+    @cached_property
+    def _mysql_storage_engine(self):
+        "Internal method used in Django tests. Don't rely on this from your code"
+        with self.connection.cursor() as cursor:
+            cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
+            result = cursor.fetchone()
+        return result[0]
+
+    @cached_property
+    def can_introspect_foreign_keys(self):
+        "Confirm support for introspected foreign keys"
+        return self._mysql_storage_engine != 'MyISAM'
+
+    @cached_property
+    def supports_microsecond_precision(self):
+        # See https://github.com/farcepest/MySQLdb1/issues/24 for why
+        # MySQLdb 1.2.5 is required.
+        return self.connection.mysql_version >= (5, 6, 4) and Database.version_info >= (1, 2, 5)
+
+    @cached_property
+    def has_zoneinfo_database(self):
+        # MySQL accepts full time zone names (e.g. Africa/Nairobi) but rejects
+        # abbreviations (e.g. EAT). When pytz isn't installed and the current
+        # time zone is LocalTimezone (the only sensible value in this
+        # context), the current time zone name will be an abbreviation. As a
+        # consequence, MySQL cannot perform time zone conversions reliably.
+        if pytz is None:
+            return False
+
+        # Test if the time zone definitions are installed.
+        with self.connection.cursor() as cursor:
+            cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
+            return cursor.fetchone() is not None
+
+    def introspected_boolean_field_type(self, *args, **kwargs):
+        return 'IntegerField'

+ 206 - 0
django_tidb/tidb/introspection.py

@@ -0,0 +1,206 @@
+from collections import namedtuple
+
+from MySQLdb.constants import FIELD_TYPE
+
+from django.db.backends.base.introspection import (
+    BaseDatabaseIntrospection, FieldInfo, TableInfo,
+)
+from django.utils.datastructures import OrderedSet
+from django.utils.encoding import force_text
+
+FieldInfo = namedtuple('FieldInfo', FieldInfo._fields + ('extra',))
+
+
+class DatabaseIntrospection(BaseDatabaseIntrospection):
+    data_types_reverse = {
+        FIELD_TYPE.BLOB: 'TextField',
+        FIELD_TYPE.CHAR: 'CharField',
+        FIELD_TYPE.DECIMAL: 'DecimalField',
+        FIELD_TYPE.NEWDECIMAL: 'DecimalField',
+        FIELD_TYPE.DATE: 'DateField',
+        FIELD_TYPE.DATETIME: 'DateTimeField',
+        FIELD_TYPE.DOUBLE: 'FloatField',
+        FIELD_TYPE.FLOAT: 'FloatField',
+        FIELD_TYPE.INT24: 'IntegerField',
+        FIELD_TYPE.LONG: 'IntegerField',
+        FIELD_TYPE.LONGLONG: 'BigIntegerField',
+        FIELD_TYPE.SHORT: 'SmallIntegerField',
+        FIELD_TYPE.STRING: 'CharField',
+        FIELD_TYPE.TIME: 'TimeField',
+        FIELD_TYPE.TIMESTAMP: 'DateTimeField',
+        FIELD_TYPE.TINY: 'IntegerField',
+        FIELD_TYPE.TINY_BLOB: 'TextField',
+        FIELD_TYPE.MEDIUM_BLOB: 'TextField',
+        FIELD_TYPE.LONG_BLOB: 'TextField',
+        FIELD_TYPE.VAR_STRING: 'CharField',
+    }
+
+    def get_field_type(self, data_type, description):
+        field_type = super(DatabaseIntrospection, self).get_field_type(data_type, description)
+        if field_type == 'IntegerField' and 'auto_increment' in description.extra:
+            return 'AutoField'
+        return field_type
+
+    def get_table_list(self, cursor):
+        """
+        Returns a list of table and view names in the current database.
+        """
+        cursor.execute("SHOW FULL TABLES")
+        return [TableInfo(row[0], {'BASE TABLE': 't', 'VIEW': 'v'}.get(row[1]))
+                for row in cursor.fetchall()]
+
+    def get_table_description(self, cursor, table_name):
+        """
+        Returns a description of the table, with the DB-API cursor.description interface.
+        """
+        # information_schema database gives more accurate results for some figures:
+        # - varchar length returned by cursor.description is an internal length,
+        #   not visible length (#5725)
+        # - precision and scale (for decimal fields) (#5014)
+        # - auto_increment is not available in cursor.description
+        InfoLine = namedtuple('InfoLine', 'col_name data_type max_len num_prec num_scale extra')
+        cursor.execute("""
+            SELECT column_name, data_type, character_maximum_length, numeric_precision, numeric_scale, extra
+            FROM information_schema.columns
+            WHERE table_name = %s AND table_schema = DATABASE()""", [table_name])
+        field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
+
+        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
+        to_int = lambda i: int(i) if i is not None else i
+        fields = []
+        for line in cursor.description:
+            col_name = force_text(line[0])
+            fields.append(
+                FieldInfo(*((col_name,)
+                            + line[1:3]
+                            + (to_int(field_info[col_name].max_len) or line[3],
+                               to_int(field_info[col_name].num_prec) or line[4],
+                               to_int(field_info[col_name].num_scale) or line[5])
+                            + (line[6],)
+                            + (field_info[col_name].extra,)))
+            )
+        return fields
+
+    def get_relations(self, cursor, table_name):
+        """
+        Returns a dictionary of {field_name: (field_name_other_table, other_table)}
+        representing all relationships to the given table.
+        """
+        constraints = self.get_key_columns(cursor, table_name)
+        relations = {}
+        for my_fieldname, other_table, other_field in constraints:
+            relations[my_fieldname] = (other_field, other_table)
+        return relations
+
+    def get_key_columns(self, cursor, table_name):
+        """
+        Returns a list of (column_name, referenced_table_name, referenced_column_name) for all
+        key columns in the given table.
+        """
+        key_columns = []
+        cursor.execute("""
+            SELECT column_name, referenced_table_name, referenced_column_name
+            FROM information_schema.key_column_usage
+            WHERE table_name = %s
+                AND table_schema = DATABASE()
+                AND referenced_table_name IS NOT NULL
+                AND referenced_column_name IS NOT NULL""", [table_name])
+        key_columns.extend(cursor.fetchall())
+        return key_columns
+
+    def get_indexes(self, cursor, table_name):
+        cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
+        # Do a two-pass search for indexes: on first pass check which indexes
+        # are multicolumn, on second pass check which single-column indexes
+        # are present.
+        rows = list(cursor.fetchall())
+        multicol_indexes = set()
+        for row in rows:
+            if row[3] > 1:
+                multicol_indexes.add(row[2])
+        indexes = {}
+        for row in rows:
+            if row[2] in multicol_indexes:
+                continue
+            if row[4] not in indexes:
+                indexes[row[4]] = {'primary_key': False, 'unique': False}
+            # It's possible to have the unique and PK constraints in separate indexes.
+            if row[2] == 'PRIMARY':
+                indexes[row[4]]['primary_key'] = True
+            if not row[1]:
+                indexes[row[4]]['unique'] = True
+        return indexes
+
+    def get_storage_engine(self, cursor, table_name):
+        """
+        Retrieves the storage engine for a given table. Returns the default
+        storage engine if the table doesn't exist.
+        """
+        cursor.execute(
+            "SELECT engine "
+            "FROM information_schema.tables "
+            "WHERE table_name = %s", [table_name])
+        result = cursor.fetchone()
+        if not result:
+            return self.connection.features._mysql_storage_engine
+        return result[0]
+
+    def get_constraints(self, cursor, table_name):
+        """
+        Retrieves any constraints or keys (unique, pk, fk, check, index) across one or more columns.
+        """
+        constraints = {}
+        # Get the actual constraint names and columns
+        name_query = """
+            SELECT kc.`constraint_name`, kc.`column_name`,
+                kc.`referenced_table_name`, kc.`referenced_column_name`
+            FROM information_schema.key_column_usage AS kc
+            WHERE
+                kc.table_schema = %s AND
+                kc.table_name = %s
+        """
+        cursor.execute(name_query, [self.connection.settings_dict['NAME'], table_name])
+        for constraint, column, ref_table, ref_column in cursor.fetchall():
+            if constraint not in constraints:
+                constraints[constraint] = {
+                    'columns': OrderedSet(),
+                    'primary_key': False,
+                    'unique': False,
+                    'index': False,
+                    'check': False,
+                    'foreign_key': (ref_table, ref_column) if ref_column else None,
+                }
+            constraints[constraint]['columns'].add(column)
+        # Now get the constraint types
+        type_query = """
+            SELECT c.constraint_name, c.constraint_type
+            FROM information_schema.table_constraints AS c
+            WHERE
+                c.table_schema = %s AND
+                c.table_name = %s
+        """
+        cursor.execute(type_query, [self.connection.settings_dict['NAME'], table_name])
+        for constraint, kind in cursor.fetchall():
+            if kind.lower() == "primary key":
+                constraints[constraint]['primary_key'] = True
+                constraints[constraint]['unique'] = True
+            elif kind.lower() == "unique":
+                constraints[constraint]['unique'] = True
+        # Now add in the indexes
+        cursor.execute("SHOW INDEX FROM %s" % self.connection.ops.quote_name(table_name))
+        for table, non_unique, index, colseq, column in [x[:5] for x in cursor.fetchall()]:
+            if index not in constraints:
+                constraints[index] = {
+                    'columns': OrderedSet(),
+                    'primary_key': False,
+                    'unique': False,
+                    'index': True,
+                    'check': False,
+                    'foreign_key': None,
+                }
+            constraints[index]['index'] = True
+            constraints[index]['columns'].add(column)
+        # Convert the sorted sets to lists
+        for constraint in constraints.values():
+            constraint['columns'] = list(constraint['columns'])
+        return constraints

+ 206 - 0
django_tidb/tidb/operations.py

@@ -0,0 +1,206 @@
+from __future__ import unicode_literals
+
+import uuid
+
+from django.conf import settings
+from django.db.backends.base.operations import BaseDatabaseOperations
+from django.utils import six, timezone
+from django.utils.encoding import force_text
+
+
+class DatabaseOperations(BaseDatabaseOperations):
+    compiler_module = "django_tidb.tidb.compiler"
+
+    # MySQL stores positive fields as UNSIGNED ints.
+    integer_field_ranges = dict(BaseDatabaseOperations.integer_field_ranges,
+        PositiveSmallIntegerField=(0, 65535),
+        PositiveIntegerField=(0, 4294967295),
+    )
+
+    def date_extract_sql(self, lookup_type, field_name):
+        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
+        if lookup_type == 'week_day':
+            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
+            # Note: WEEKDAY() returns 0-6, Monday=0.
+            return "DAYOFWEEK(%s)" % field_name
+        else:
+            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
+
+    def date_trunc_sql(self, lookup_type, field_name):
+        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
+        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
+        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
+        try:
+            i = fields.index(lookup_type) + 1
+        except ValueError:
+            sql = field_name
+        else:
+            format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
+            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
+        return sql
+
+    def datetime_extract_sql(self, lookup_type, field_name, tzname):
+        if settings.USE_TZ:
+            field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
+            params = [tzname]
+        else:
+            params = []
+        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
+        if lookup_type == 'week_day':
+            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
+            # Note: WEEKDAY() returns 0-6, Monday=0.
+            sql = "DAYOFWEEK(%s)" % field_name
+        else:
+            sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
+        return sql, params
+
+    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
+        if settings.USE_TZ:
+            field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
+            params = [tzname]
+        else:
+            params = []
+        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
+        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
+        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
+        try:
+            i = fields.index(lookup_type) + 1
+        except ValueError:
+            sql = field_name
+        else:
+            format_str = ''.join([f for f in format[:i]] + [f for f in format_def[i:]])
+            sql = "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
+        return sql, params
+
+    def date_interval_sql(self, timedelta):
+        return "INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND" % (
+            timedelta.days, timedelta.seconds, timedelta.microseconds), []
+
+    def format_for_duration_arithmetic(self, sql):
+        if self.connection.features.supports_microsecond_precision:
+            return 'INTERVAL %s MICROSECOND' % sql
+        else:
+            return 'INTERVAL FLOOR(%s / 1000000) SECOND' % sql
+
+    def drop_foreignkey_sql(self):
+        return "DROP FOREIGN KEY"
+
+    def force_no_ordering(self):
+        """
+        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
+        columns. If no ordering would otherwise be applied, we don't want any
+        implicit sorting going on.
+        """
+        return [(None, ("NULL", [], False))]
+
+    def fulltext_search_sql(self, field_name):
+        return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
+
+    def last_executed_query(self, cursor, sql, params):
+        # With MySQLdb, cursor objects have an (undocumented) "_last_executed"
+        # attribute where the exact query sent to the database is saved.
+        # See MySQLdb/cursors.py in the source distribution.
+        return force_text(getattr(cursor, '_last_executed', None), errors='replace')
+
+    def no_limit_value(self):
+        # 2**64 - 1, as recommended by the MySQL documentation
+        return 18446744073709551615
+
+    def quote_name(self, name):
+        if name.startswith("`") and name.endswith("`"):
+            return name  # Quoting once is enough.
+        return "`%s`" % name
+
+    def random_function_sql(self):
+        return 'RAND()'
+
+    def sql_flush(self, style, tables, sequences, allow_cascade=False):
+        # NB: The generated SQL below is specific to MySQL
+        # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
+        # to clear all tables of all data
+        if tables:
+            sql = ['SET FOREIGN_KEY_CHECKS = 0;']
+            for table in tables:
+                sql.append('%s %s;' % (
+                    style.SQL_KEYWORD('TRUNCATE'),
+                    style.SQL_FIELD(self.quote_name(table)),
+                ))
+            sql.append('SET FOREIGN_KEY_CHECKS = 1;')
+            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
+            return sql
+        else:
+            return []
+
+    def validate_autopk_value(self, value):
+        # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
+        if value == 0:
+            raise ValueError('The database backend does not accept 0 as a '
+                             'value for AutoField.')
+        return value
+
+    def value_to_db_datetime(self, value):
+        if value is None:
+            return None
+
+        # MySQL doesn't support tz-aware datetimes
+        if timezone.is_aware(value):
+            if settings.USE_TZ:
+                value = value.astimezone(timezone.utc).replace(tzinfo=None)
+            else:
+                raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
+
+        if not self.connection.features.supports_microsecond_precision:
+            value = value.replace(microsecond=0)
+
+        return six.text_type(value)
+
+    def value_to_db_time(self, value):
+        if value is None:
+            return None
+
+        # MySQL doesn't support tz-aware times
+        if timezone.is_aware(value):
+            raise ValueError("MySQL backend does not support timezone-aware times.")
+
+        return six.text_type(value)
+
+    def max_name_length(self):
+        return 64
+
+    def bulk_insert_sql(self, fields, num_values):
+        items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
+        return "VALUES " + ", ".join([items_sql] * num_values)
+
+    def combine_expression(self, connector, sub_expressions):
+        """
+        MySQL requires special cases for ^ operators in query expressions
+        """
+        if connector == '^':
+            return 'POW(%s)' % ','.join(sub_expressions)
+        return super(DatabaseOperations, self).combine_expression(connector, sub_expressions)
+
+    def get_db_converters(self, expression):
+        converters = super(DatabaseOperations, self).get_db_converters(expression)
+        internal_type = expression.output_field.get_internal_type()
+        if internal_type in ['BooleanField', 'NullBooleanField']:
+            converters.append(self.convert_booleanfield_value)
+        if internal_type == 'UUIDField':
+            converters.append(self.convert_uuidfield_value)
+        if internal_type == 'TextField':
+            converters.append(self.convert_textfield_value)
+        return converters
+
+    def convert_booleanfield_value(self, value, expression, connection, context):
+        if value in (0, 1):
+            value = bool(value)
+        return value
+
+    def convert_uuidfield_value(self, value, expression, connection, context):
+        if value is not None:
+            value = uuid.UUID(value)
+        return value
+
+    def convert_textfield_value(self, value, expression, connection, context):
+        if value is not None:
+            value = force_text(value)
+        return value
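
For reference (not from the commit), a small sketch of the SQL fragment that `date_trunc_sql()` / `datetime_trunc_sql()` above assemble for a `'month'` lookup on a hypothetical `created` column. The doubled `%%` only collapses to a single `%` when Django later binds query parameters.

```python
# Sketch of date_trunc_sql('month', 'created'): keep year and month from the
# column, pad the rest with fixed values so the result is the first of the month.
fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
fmt = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')
fmt_default = ('0000-', '01', '-01', ' 00:', '00', ':00')

i = fields.index('month') + 1
format_str = ''.join(list(fmt[:i]) + list(fmt_default[i:]))
print("CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % ('created', format_str))
# CAST(DATE_FORMAT(created, '%%Y-%%m-01 00:00:00') AS DATETIME)
```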

+ 125 - 0
django_tidb/tidb/schema.py

@@ -0,0 +1,125 @@
+from django.db.backends.base.schema import BaseDatabaseSchemaEditor
+from django.db.models import NOT_PROVIDED
+
+
+class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
+
+    sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
+
+    sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
+    sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
+    sql_alter_column_type = "MODIFY %(column)s %(type)s"
+    sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
+
+    sql_delete_table = "DROP TABLE %(table)s"
+    sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s"
+    sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
+    sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
+
+    alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
+    alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
+
+    sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
+    sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
+
+    def quote_value(self, value):
+        # Inner import to allow module to fail to load gracefully
+        import MySQLdb.converters
+        return MySQLdb.escape(value, MySQLdb.converters.conversions)
+
+    def skip_default(self, field):
+        """
+        MySQL doesn't accept default values for longtext and longblob
+        and implicitly treats these columns as nullable.
+        """
+        return field.db_type(self.connection) in {'longtext', 'longblob'}
+
+    def add_field(self, model, field):
+        super(DatabaseSchemaEditor, self).add_field(model, field)
+
+        # Simulate the effect of a one-off default.
+        # field.default may be unhashable, so a set isn't used for "in" check.
+        if self.skip_default(field) and field.default not in (None, NOT_PROVIDED):
+            effective_default = self.effective_default(field)
+            self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
+                'table': self.quote_name(model._meta.db_table),
+                'column': self.quote_name(field.column),
+            }, [effective_default])
+
+    def _model_indexes_sql(self, model):
+        storage = self.connection.introspection.get_storage_engine(
+            self.connection.cursor(), model._meta.db_table
+        )
+        if storage == "InnoDB":
+            for field in model._meta.local_fields:
+                if field.db_index and not field.unique and field.get_internal_type() == "ForeignKey":
+                    # Temporary setting db_index to False (in memory) to disable
+                    # index creation for FKs (index automatically created by MySQL)
+                    field.db_index = False
+        return super(DatabaseSchemaEditor, self)._model_indexes_sql(model)
+
+    def _delete_composed_index(self, model, fields, *args):
+        """
+        MySQL can remove an implicit FK index on a field when that field is
+        covered by another index like a unique_together. "covered" here means
+        that the more complex index starts like the simpler one.
+        http://bugs.mysql.com/bug.php?id=37910 / Django ticket #24757
+        We check here before removing the [unique|index]_together if we have to
+        recreate a FK index.
+        """
+        first_field = model._meta.get_field(fields[0])
+        if first_field.get_internal_type() == 'ForeignKey':
+            constraint_names = self._constraint_names(model, [first_field.column], index=True)
+            if not constraint_names:
+                self.execute(self._create_index_sql(model, [first_field], suffix=""))
+        return super(DatabaseSchemaEditor, self)._delete_composed_index(model, fields, *args)
+
+    def _set_field_new_type_null_status(self, field, new_type):
+        """
+        Keep the null property of the old field. If it has changed, it will be
+        handled separately.
+        """
+        if field.null:
+            new_type += " NULL"
+        else:
+            new_type += " NOT NULL"
+        return new_type
+
+    def _alter_column_type_sql(self, table, old_field, new_field, new_type):
+        new_type = self._set_field_new_type_null_status(old_field, new_type)
+        return super(DatabaseSchemaEditor, self)._alter_column_type_sql(table, old_field, new_field, new_type)
+
+    def _rename_field_sql(self, table, old_field, new_field, new_type):
+        new_type = self._set_field_new_type_null_status(old_field, new_type)
+        return super(DatabaseSchemaEditor, self)._rename_field_sql(table, old_field, new_field, new_type)
+
+    def remove_field(self, model, field):
+        """
+        Removes a field from a model. Usually involves deleting a column,
+        but for M2Ms may involve deleting a table.
+        """
+        # Special-case implicit M2M tables
+        if field.many_to_many and field.rel.through._meta.auto_created:
+            return self.delete_model(field.rel.through)
+        # It might not actually have a column behind it
+        if field.db_parameters(connection=self.connection)['type'] is None:
+            return
+        # Drop any FK constraints, MySQL requires explicit deletion
+        if field.rel:
+            fk_names = self._constraint_names(model, [field.column], foreign_key=True)
+            for fk_name in fk_names:
+                self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
+        # Drop any index; TiDB requires explicit deletion
+        if field.db_index:
+            idx_names = self._constraint_names(model, [field.column], index=True)
+            for idx_name in idx_names:
+                self.execute(self._delete_constraint_sql(self.sql_delete_index, model, idx_name))
+        # Delete the column
+        sql = self.sql_delete_column % {
+            "table": self.quote_name(model._meta.db_table),
+            "column": self.quote_name(field.column),
+        }
+        self.execute(sql)
+        # Reset connection if required
+        if self.connection.features.connection_persists_old_columns:
+            self.connection.close()

+ 35 - 0
django_tidb/tidb/validation.py

@@ -0,0 +1,35 @@
+from django.core import checks
+from django.db.backends.base.validation import BaseDatabaseValidation
+
+
+class DatabaseValidation(BaseDatabaseValidation):
+    def check_field(self, field, **kwargs):
+        """
+        MySQL has the following field length restriction:
+        No character (varchar) fields can have a length exceeding 255
+        characters if they have a unique index on them.
+        """
+        from django.db import connection
+
+        errors = super(DatabaseValidation, self).check_field(field, **kwargs)
+
+        # Ignore any related fields.
+        if getattr(field, 'rel', None) is None:
+            field_type = field.db_type(connection)
+
+            # Ignore any non-concrete fields
+            if field_type is None:
+                return errors
+
+            if (field_type.startswith('varchar')  # Look for CharFields...
+                    and field.unique  # ... that are unique
+                    and (field.max_length is None or int(field.max_length) > 255)):
+                errors.append(
+                    checks.Error(
+                        ('MySQL does not allow unique CharFields to have a max_length > 255.'),
+                        hint=None,
+                        obj=field,
+                        id='mysql.E001',
+                    )
+                )
+        return errors
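
A quick illustration (not in the commit) of what this check flags: a unique `CharField` whose `max_length` exceeds 255 maps to a unique `varchar` wider than the limit described in the docstring above, so it would be reported as `mysql.E001`. Model and field names are hypothetical.

```python
# Sketch of a model definition that check_field() would flag, assuming it
# lives inside a normal Django app.
from django.db import models

class Article(models.Model):
    # unique varchar(300) -> exceeds the 255-character limit -> mysql.E001
    slug = models.CharField(max_length=300, unique=True)

    # unique varchar(255) -> within the limit, passes the check
    title = models.CharField(max_length=255, unique=True)
```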

+ 24 - 0
setup.py

@@ -0,0 +1,24 @@
+from distutils.core import setup
+
+long_description = """TiDB backend for Django"""
+
+setup(
+    name='django_tidb',
+    version='1.0',
+    author='Rain Li',
+    author_email='blacktear23@gmail.com',
+    url='http://github.com/blacktear23/django_tidb',
+    download_url='http://github.com/blacktear23/django_tidb',
+    description='TiDB backend for Django',
+    long_description=long_description,
+    packages=['django_tidb', 'django_tidb.tidb'],
+    license='Apache2',
+    classifiers=[
+        'Development Status :: 4 - Beta',
+        'Intended Audience :: Developers',
+        'Operating System :: OS Independent',
+        'Programming Language :: Python :: 2',
+        'Topic :: Database Driver',
+        'Topic :: Software Development :: Libraries',
+    ],
+)