
Update README, add daily and monthly Lambda functions

LAPTOP-36F69PCF\HEEWON 3 years ago
parent
commit
9fbaaebf86
91 files changed with 8852 additions and 0 deletions
  1. +18 -0     README.md
  2. +49 -0     ambt-preden-dailymigration-dev/lambda_function.py
  3. BIN        ambt-preden-dailymigration-dev/my-deployment-package.zip
  4. +1 -0      ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/INSTALLER
  5. +19 -0     ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/LICENSE
  6. +180 -0    ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/METADATA
  7. +43 -0     ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/RECORD
  8. +0 -0      ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/REQUESTED
  9. +5 -0      ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/WHEEL
  10. +1 -0     ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/top_level.txt
  11. +185 -0   ambt-preden-dailymigration-dev/package/pymysql/__init__.py
  12. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/__init__.cpython-38.pyc
  13. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/_auth.cpython-38.pyc
  14. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/charset.cpython-38.pyc
  15. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/connections.cpython-38.pyc
  16. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/converters.cpython-38.pyc
  17. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/cursors.cpython-38.pyc
  18. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/err.cpython-38.pyc
  19. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/optionfile.cpython-38.pyc
  20. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/protocol.cpython-38.pyc
  21. BIN       ambt-preden-dailymigration-dev/package/pymysql/__pycache__/times.cpython-38.pyc
  22. +266 -0   ambt-preden-dailymigration-dev/package/pymysql/_auth.py
  23. +209 -0   ambt-preden-dailymigration-dev/package/pymysql/charset.py
  24. +1367 -0  ambt-preden-dailymigration-dev/package/pymysql/connections.py
  25. +38 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/CLIENT.py
  26. +32 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/COMMAND.py
  27. +68 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/CR.py
  28. +474 -0   ambt-preden-dailymigration-dev/package/pymysql/constants/ER.py
  29. +31 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/FIELD_TYPE.py
  30. +15 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/FLAG.py
  31. +10 -0    ambt-preden-dailymigration-dev/package/pymysql/constants/SERVER_STATUS.py
  32. +0 -0     ambt-preden-dailymigration-dev/package/pymysql/constants/__init__.py
  33. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/CLIENT.cpython-38.pyc
  34. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/COMMAND.cpython-38.pyc
  35. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/CR.cpython-38.pyc
  36. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/ER.cpython-38.pyc
  37. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc
  38. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/FLAG.cpython-38.pyc
  39. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc
  40. BIN       ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/__init__.cpython-38.pyc
  41. +364 -0   ambt-preden-dailymigration-dev/package/pymysql/converters.py
  42. +496 -0   ambt-preden-dailymigration-dev/package/pymysql/cursors.py
  43. +143 -0   ambt-preden-dailymigration-dev/package/pymysql/err.py
  44. +18 -0    ambt-preden-dailymigration-dev/package/pymysql/optionfile.py
  45. +358 -0   ambt-preden-dailymigration-dev/package/pymysql/protocol.py
  46. +20 -0    ambt-preden-dailymigration-dev/package/pymysql/times.py
  47. +99 -0    ambt-preden-monthlymigration-dev/lambda_function.py
  48. BIN       ambt-preden-monthlymigration-dev/my-deployment-package.zip
  49. +1 -0     ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/INSTALLER
  50. +19 -0    ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/LICENSE
  51. +180 -0   ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/METADATA
  52. +43 -0    ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/RECORD
  53. +0 -0     ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/REQUESTED
  54. +5 -0     ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/WHEEL
  55. +1 -0     ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/top_level.txt
  56. +185 -0   ambt-preden-monthlymigration-dev/package/pymysql/__init__.py
  57. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/__init__.cpython-38.pyc
  58. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/_auth.cpython-38.pyc
  59. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/charset.cpython-38.pyc
  60. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/connections.cpython-38.pyc
  61. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/converters.cpython-38.pyc
  62. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/cursors.cpython-38.pyc
  63. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/err.cpython-38.pyc
  64. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/optionfile.cpython-38.pyc
  65. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/protocol.cpython-38.pyc
  66. BIN       ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/times.cpython-38.pyc
  67. +266 -0   ambt-preden-monthlymigration-dev/package/pymysql/_auth.py
  68. +209 -0   ambt-preden-monthlymigration-dev/package/pymysql/charset.py
  69. +1367 -0  ambt-preden-monthlymigration-dev/package/pymysql/connections.py
  70. +38 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/CLIENT.py
  71. +32 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/COMMAND.py
  72. +68 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/CR.py
  73. +474 -0   ambt-preden-monthlymigration-dev/package/pymysql/constants/ER.py
  74. +31 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/FIELD_TYPE.py
  75. +15 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/FLAG.py
  76. +10 -0    ambt-preden-monthlymigration-dev/package/pymysql/constants/SERVER_STATUS.py
  77. +0 -0     ambt-preden-monthlymigration-dev/package/pymysql/constants/__init__.py
  78. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/CLIENT.cpython-38.pyc
  79. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/COMMAND.cpython-38.pyc
  80. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/CR.cpython-38.pyc
  81. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/ER.cpython-38.pyc
  82. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc
  83. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/FLAG.cpython-38.pyc
  84. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc
  85. BIN       ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/__init__.cpython-38.pyc
  86. +364 -0   ambt-preden-monthlymigration-dev/package/pymysql/converters.py
  87. +496 -0   ambt-preden-monthlymigration-dev/package/pymysql/cursors.py
  88. +143 -0   ambt-preden-monthlymigration-dev/package/pymysql/err.py
  89. +18 -0    ambt-preden-monthlymigration-dev/package/pymysql/optionfile.py
  90. +358 -0   ambt-preden-monthlymigration-dev/package/pymysql/protocol.py
  91. +20 -0    ambt-preden-monthlymigration-dev/package/pymysql/times.py

+ 18 - 0
README.md

@@ -0,0 +1,18 @@
+# Ambient Platform - In-Home Energy Usage Prediction (preden)
+
+  Source code repository for the Ambient Platform in-home energy usage prediction project.
+  This repository is linked to the AWS SageMaker ambt-preden instance.
+
+  It stores the notebooks used in SageMaker and the Lambda functions used for management and tracking.
+  Other source code needed for the project may also be stored here.
+
++ ### Folder descriptions
+  + #### ambt-preden-wholemigration-dev
+    + ##### SageMaker notebook
+            Migrates the complete data of the complex server DAYENERGY and MONTHENERGY tables (from the completion date to the run date)
+  + #### ambt-preden-dailymigration-dev
+    + ##### Lambda Function
+            Migrates one day (the day before the run date) of the complex server DAYENERGY table
+  + #### ambt-preden-monthlymigration-dev
+    + ##### Lambda Function
+            Migrates one month (the month before the run date) of the complex server MONTHENERGY table
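
A minimal illustrative sketch (not part of this commit) of the "previous day" and "previous month" boundaries the two Lambda functions described above are said to target. The first-of-month arithmetic for the monthly window is an assumption about the intended behaviour, not code taken from the repository:

    from datetime import datetime, timedelta

    now = datetime.now()

    # Daily migration window (assumption): the calendar day before the run date.
    yesterday = now - timedelta(days=1)

    # Monthly migration window (assumption): step back from the first day of
    # the current month to land on the last day of the previous month.
    last_month = now.replace(day=1) - timedelta(days=1)

    print(yesterday.year, yesterday.month, yesterday.day)  # run on 2022-03-05 -> 2022 3 4
    print(last_month.year, last_month.month)               # run on 2022-03-05 -> 2022 2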

+ 49 - 0
ambt-preden-dailymigration-dev/lambda_function.py

@@ -0,0 +1,49 @@
+import sys
+import pymysql
+import json
+import decimal
+import boto3
+import uuid
+import logging
+from datetime import datetime, timedelta
+
+class DecimalnDateTimeEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, decimal.Decimal) or isinstance(obj, datetime):
+            return str(obj)
+        return json.JSONEncoder.default(self, obj)
+
+gurigalmae_host  = "121.163.199.101"
+gurigalmae_port = 3306
+gurigalmae_id = "valley"
+gurigalmae_pw = "valley123"
+gurigalmae_dbname = "valleydb"
+
+def lambda_handler(event, context):
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO)
+    try:
+        conn = pymysql.connect(host=gurigalmae_host, port=gurigalmae_port, user=gurigalmae_id, passwd=gurigalmae_pw, db=gurigalmae_dbname, connect_timeout=5)
+    except:
+        logger.error("ERROR: Unexpected error: Could not connect to MySql instance.")
+        sys.exit()
+
+    logger.info("SUCCESS: Connection to RDS mysql instance succeeded")
+    
+    today = datetime.now()
+    yesterday = today - timedelta(days=1)
+    
+    sql = "SELECT * FROM DAYENERGY WHERE ENERGY_YEAR={} AND ENERGY_MONTH={} AND ENERGY_DAY={}".format(yesterday.year, yesterday.month, yesterday.day)
+    with conn.cursor(pymysql.cursors.DictCursor) as cur:
+        cur.execute(sql)
+        logger.info(sql)
+        rows = cur.fetchall()
+
+    data = ""
+    for row in rows:
+        data += json.dumps(row, cls=DecimalnDateTimeEncoder) + '\n'
+        logger.info(row)
+
+    s3=boto3.resource('s3')
+    object = s3.Object('hdci-ambt-homenetserver-raw','dev/site_name=gurigalmae/table_name=dayenergy/year={}/month={}/ambt-preden-lambda-migration-dev-{}-{}-{}-{}'.format(yesterday.year, yesterday.month, yesterday.year, yesterday.month, yesterday.day, str(uuid.uuid4())))
+    return object.put(Body=data)
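
As a usage note (not part of the diff): the handler above concatenates one JSON object per row into a newline-delimited string before the single S3 put. Below is a small self-contained sketch of what that serialization produces; the row contents are made up for illustration, and actual DAYENERGY column names beyond the three used in the WHERE clause are not shown in this commit:

    import json
    import decimal
    from datetime import datetime

    class DecimalnDateTimeEncoder(json.JSONEncoder):
        # Same idea as the encoder in lambda_function.py: Decimal and datetime
        # values are stringified so json.dumps does not raise TypeError.
        def default(self, obj):
            if isinstance(obj, (decimal.Decimal, datetime)):
                return str(obj)
            return super().default(obj)

    # Hypothetical row; only ENERGY_YEAR/ENERGY_MONTH/ENERGY_DAY appear in the real query.
    row = {"ENERGY_YEAR": 2022, "ENERGY_MONTH": 3, "ENERGY_DAY": 4,
           "USAGE": decimal.Decimal("12.34"), "REG_DT": datetime(2022, 3, 5, 0, 10)}

    print(json.dumps(row, cls=DecimalnDateTimeEncoder))
    # {"ENERGY_YEAR": 2022, "ENERGY_MONTH": 3, "ENERGY_DAY": 4, "USAGE": "12.34", "REG_DT": "2022-03-05 00:10:00"}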

BIN
ambt-preden-dailymigration-dev/my-deployment-package.zip


+ 1 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 19 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/LICENSE

@@ -0,0 +1,19 @@
+Copyright (c) 2010, 2013 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.

+ 180 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/METADATA

@@ -0,0 +1,180 @@
+Metadata-Version: 2.1
+Name: PyMySQL
+Version: 1.0.2
+Summary: Pure Python MySQL Driver
+Home-page: https://github.com/PyMySQL/PyMySQL/
+Author: yutaka.matsubara
+Author-email: yutaka.matsubara@gmail.com
+Maintainer: Inada Naoki
+Maintainer-email: songofacandy@gmail.com
+License: "MIT"
+Project-URL: Documentation, https://pymysql.readthedocs.io/
+Keywords: MySQL
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Database
+Requires-Python: >=3.6
+Provides-Extra: ed25519
+Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
+Provides-Extra: rsa
+Requires-Dist: cryptography ; extra == 'rsa'
+
+.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
+    :target: https://pymysql.readthedocs.io/
+    :alt: Documentation Status
+
+.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
+    :target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
+
+.. image:: https://img.shields.io/lgtm/grade/python/g/PyMySQL/PyMySQL.svg?logo=lgtm&logoWidth=18
+    :target: https://lgtm.com/projects/g/PyMySQL/PyMySQL/context:python
+
+
+PyMySQL
+=======
+
+.. contents:: Table of Contents
+   :local:
+
+This package contains a pure-Python MySQL client library, based on `PEP 249`_.
+
+Most public APIs are compatible with mysqlclient and MySQLdb.
+
+NOTE: PyMySQL doesn't support low level APIs `_mysql` provides like `data_seek`,
+`store_result`, and `use_result`. You should use high level APIs defined in `PEP 249`_.
+But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover
+their usecase.
+
+.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
+
+
+Requirements
+-------------
+
+* Python -- one of the following:
+
+  - CPython_ : 3.6 and newer
+  - PyPy_ : Latest 3.x version
+
+* MySQL Server -- one of the following:
+
+  - MySQL_ >= 5.6
+  - MariaDB_ >= 10.0
+
+.. _CPython: https://www.python.org/
+.. _PyPy: https://pypy.org/
+.. _MySQL: https://www.mysql.com/
+.. _MariaDB: https://mariadb.org/
+
+
+Installation
+------------
+
+Package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.
+
+You can install it with pip::
+
+    $ python3 -m pip install PyMySQL
+
+To use "sha256_password" or "caching_sha2_password" for authenticate,
+you need to install additional dependency::
+
+   $ python3 -m pip install PyMySQL[rsa]
+
+To use MariaDB's "ed25519" authentication method, you need to install
+additional dependency::
+
+   $ python3 -m pip install PyMySQL[ed25519]
+
+
+Documentation
+-------------
+
+Documentation is available online: https://pymysql.readthedocs.io/
+
+For support, please refer to the `StackOverflow
+<https://stackoverflow.com/questions/tagged/pymysql>`_.
+
+
+Example
+-------
+
+The following examples make use of a simple table
+
+.. code:: sql
+
+   CREATE TABLE `users` (
+       `id` int(11) NOT NULL AUTO_INCREMENT,
+       `email` varchar(255) COLLATE utf8_bin NOT NULL,
+       `password` varchar(255) COLLATE utf8_bin NOT NULL,
+       PRIMARY KEY (`id`)
+   ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
+   AUTO_INCREMENT=1 ;
+
+
+.. code:: python
+
+    import pymysql.cursors
+
+    # Connect to the database
+    connection = pymysql.connect(host='localhost',
+                                 user='user',
+                                 password='passwd',
+                                 database='db',
+                                 cursorclass=pymysql.cursors.DictCursor)
+
+    with connection:
+        with connection.cursor() as cursor:
+            # Create a new record
+            sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
+            cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
+
+        # connection is not autocommit by default. So you must commit to save
+        # your changes.
+        connection.commit()
+
+        with connection.cursor() as cursor:
+            # Read a single record
+            sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
+            cursor.execute(sql, ('webmaster@python.org',))
+            result = cursor.fetchone()
+            print(result)
+
+
+This example will print:
+
+.. code:: python
+
+    {'password': 'very-secret', 'id': 1}
+
+
+Resources
+---------
+
+* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
+
+* MySQL Reference Manuals: https://dev.mysql.com/doc/
+
+* MySQL client/server protocol:
+  https://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+* "Connector" channel in MySQL Community Slack:
+  https://lefred.be/mysql-community-on-slack/
+
+* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
+
+License
+-------
+
+PyMySQL is released under the MIT License. See LICENSE for more information.
+
+

+ 43 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/RECORD

@@ -0,0 +1,43 @@
+PyMySQL-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+PyMySQL-1.0.2.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
+PyMySQL-1.0.2.dist-info/METADATA,sha256=hz4Fdo8sOFKcNqZ8wp4Bp-txNCOBCnw9-leYR7QBZ5I,5119
+PyMySQL-1.0.2.dist-info/RECORD,,
+PyMySQL-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+PyMySQL-1.0.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+PyMySQL-1.0.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
+pymysql/__init__.py,sha256=XL7skPUK4cbKiek68T0vMob-L4YkIRLb2KX4hdMZVvM,4391
+pymysql/__pycache__/__init__.cpython-38.pyc,,
+pymysql/__pycache__/_auth.cpython-38.pyc,,
+pymysql/__pycache__/charset.cpython-38.pyc,,
+pymysql/__pycache__/connections.cpython-38.pyc,,
+pymysql/__pycache__/converters.cpython-38.pyc,,
+pymysql/__pycache__/cursors.cpython-38.pyc,,
+pymysql/__pycache__/err.cpython-38.pyc,,
+pymysql/__pycache__/optionfile.cpython-38.pyc,,
+pymysql/__pycache__/protocol.cpython-38.pyc,,
+pymysql/__pycache__/times.cpython-38.pyc,,
+pymysql/_auth.py,sha256=l1VtBwDpCtTkalgYQFASO-rj-vEd3DGYR8g-eQjNF1U,7399
+pymysql/charset.py,sha256=JCvshFnNf4vzkpXc6uPCyg07qGNfZaVZoxrFqzVlKFs,10293
+pymysql/connections.py,sha256=EwKWqFIWlx6kbOeDFIhMFpjJ9-pyF140E5ouKgrrYfY,51251
+pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
+pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
+pymysql/constants/CR.py,sha256=oHyD9dnR1DUX7hd42rcamMnFrWhjUZz7E4S6qQWSQb4,1927
+pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
+pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
+pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
+pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
+pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymysql/constants/__pycache__/CLIENT.cpython-38.pyc,,
+pymysql/constants/__pycache__/COMMAND.cpython-38.pyc,,
+pymysql/constants/__pycache__/CR.cpython-38.pyc,,
+pymysql/constants/__pycache__/ER.cpython-38.pyc,,
+pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc,,
+pymysql/constants/__pycache__/FLAG.cpython-38.pyc,,
+pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc,,
+pymysql/constants/__pycache__/__init__.cpython-38.pyc,,
+pymysql/converters.py,sha256=MBXTOCXSyewMculaRliBEzPVkOKXLiRMqvIXih9Akrg,9430
+pymysql/cursors.py,sha256=1E79f3vysxygyfZMhvR6-yFDfysRn3Go8xZTywteh4o,15366
+pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773
+pymysql/optionfile.py,sha256=ehPrZW4d7pcEvXGAEpsKgLdXpFnIQD93yF7T_jHjoRk,573
+pymysql/protocol.py,sha256=Ur8xXkVvyFc6m5CA34QrHBasADvS_NPFsWU-Q3flRYA,11859
+pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360

+ 0 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/REQUESTED


+ 5 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 1 - 0
ambt-preden-dailymigration-dev/package/PyMySQL-1.0.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+pymysql

+ 185 - 0
ambt-preden-dailymigration-dev/package/pymysql/__init__.py

@@ -0,0 +1,185 @@
+"""
+PyMySQL: A pure-Python MySQL client library.
+
+Copyright (c) 2010-2016 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+import sys
+
+from .constants import FIELD_TYPE
+from .err import (
+    Warning,
+    Error,
+    InterfaceError,
+    DataError,
+    DatabaseError,
+    OperationalError,
+    IntegrityError,
+    InternalError,
+    NotSupportedError,
+    ProgrammingError,
+    MySQLError,
+)
+from .times import (
+    Date,
+    Time,
+    Timestamp,
+    DateFromTicks,
+    TimeFromTicks,
+    TimestampFromTicks,
+)
+
+
+VERSION = (1, 0, 2, None)
+if VERSION[3] is not None:
+    VERSION_STRING = "%d.%d.%d_%s" % VERSION
+else:
+    VERSION_STRING = "%d.%d.%d" % VERSION[:3]
+threadsafety = 1
+apilevel = "2.0"
+paramstyle = "pyformat"
+
+from . import connections  # noqa: E402
+
+
+class DBAPISet(frozenset):
+    def __ne__(self, other):
+        if isinstance(other, set):
+            return frozenset.__ne__(self, other)
+        else:
+            return other not in self
+
+    def __eq__(self, other):
+        if isinstance(other, frozenset):
+            return frozenset.__eq__(self, other)
+        else:
+            return other in self
+
+    def __hash__(self):
+        return frozenset.__hash__(self)
+
+
+STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
+BINARY = DBAPISet(
+    [
+        FIELD_TYPE.BLOB,
+        FIELD_TYPE.LONG_BLOB,
+        FIELD_TYPE.MEDIUM_BLOB,
+        FIELD_TYPE.TINY_BLOB,
+    ]
+)
+NUMBER = DBAPISet(
+    [
+        FIELD_TYPE.DECIMAL,
+        FIELD_TYPE.DOUBLE,
+        FIELD_TYPE.FLOAT,
+        FIELD_TYPE.INT24,
+        FIELD_TYPE.LONG,
+        FIELD_TYPE.LONGLONG,
+        FIELD_TYPE.TINY,
+        FIELD_TYPE.YEAR,
+    ]
+)
+DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
+TIME = DBAPISet([FIELD_TYPE.TIME])
+TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
+DATETIME = TIMESTAMP
+ROWID = DBAPISet()
+
+
+def Binary(x):
+    """Return x as a binary type."""
+    return bytes(x)
+
+
+Connect = connect = Connection = connections.Connection
+
+
+def get_client_info():  # for MySQLdb compatibility
+    version = VERSION
+    if VERSION[3] is None:
+        version = VERSION[:3]
+    return ".".join(map(str, version))
+
+
+# we include a doctored version_info here for MySQLdb compatibility
+version_info = (1, 4, 0, "final", 0)
+
+NULL = "NULL"
+
+__version__ = get_client_info()
+
+
+def thread_safe():
+    return True  # match MySQLdb.thread_safe()
+
+
+def install_as_MySQLdb():
+    """
+    After this function is called, any application that imports MySQLdb or
+    _mysql will unwittingly actually use pymysql.
+    """
+    sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"]
+
+
+__all__ = [
+    "BINARY",
+    "Binary",
+    "Connect",
+    "Connection",
+    "DATE",
+    "Date",
+    "Time",
+    "Timestamp",
+    "DateFromTicks",
+    "TimeFromTicks",
+    "TimestampFromTicks",
+    "DataError",
+    "DatabaseError",
+    "Error",
+    "FIELD_TYPE",
+    "IntegrityError",
+    "InterfaceError",
+    "InternalError",
+    "MySQLError",
+    "NULL",
+    "NUMBER",
+    "NotSupportedError",
+    "DBAPISet",
+    "OperationalError",
+    "ProgrammingError",
+    "ROWID",
+    "STRING",
+    "TIME",
+    "TIMESTAMP",
+    "Warning",
+    "apilevel",
+    "connect",
+    "connections",
+    "constants",
+    "converters",
+    "cursors",
+    "get_client_info",
+    "paramstyle",
+    "threadsafety",
+    "version_info",
+    "install_as_MySQLdb",
+    "__version__",
+]

BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/__init__.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/_auth.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/charset.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/connections.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/converters.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/cursors.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/err.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/optionfile.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/protocol.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/__pycache__/times.cpython-38.pyc


+ 266 - 0
ambt-preden-dailymigration-dev/package/pymysql/_auth.py

@@ -0,0 +1,266 @@
+"""
+Implements auth methods
+"""
+from .err import OperationalError
+
+
+try:
+    from cryptography.hazmat.backends import default_backend
+    from cryptography.hazmat.primitives import serialization, hashes
+    from cryptography.hazmat.primitives.asymmetric import padding
+
+    _have_cryptography = True
+except ImportError:
+    _have_cryptography = False
+
+from functools import partial
+import hashlib
+
+
+DEBUG = False
+SCRAMBLE_LENGTH = 20
+sha1_new = partial(hashlib.new, "sha1")
+
+
+# mysql_native_password
+# https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41
+
+
+def scramble_native_password(password, message):
+    """Scramble used for mysql_native_password"""
+    if not password:
+        return b""
+
+    stage1 = sha1_new(password).digest()
+    stage2 = sha1_new(stage1).digest()
+    s = sha1_new()
+    s.update(message[:SCRAMBLE_LENGTH])
+    s.update(stage2)
+    result = s.digest()
+    return _my_crypt(result, stage1)
+
+
+def _my_crypt(message1, message2):
+    result = bytearray(message1)
+
+    for i in range(len(result)):
+        result[i] ^= message2[i]
+
+    return bytes(result)
+
+
+# MariaDB's client_ed25519-plugin
+# https://mariadb.com/kb/en/library/connection/#client_ed25519-plugin
+
+_nacl_bindings = False
+
+
+def _init_nacl():
+    global _nacl_bindings
+    try:
+        from nacl import bindings
+
+        _nacl_bindings = bindings
+    except ImportError:
+        raise RuntimeError(
+            "'pynacl' package is required for ed25519_password auth method"
+        )
+
+
+def _scalar_clamp(s32):
+    ba = bytearray(s32)
+    ba0 = bytes(bytearray([ba[0] & 248]))
+    ba31 = bytes(bytearray([(ba[31] & 127) | 64]))
+    return ba0 + bytes(s32[1:31]) + ba31
+
+
+def ed25519_password(password, scramble):
+    """Sign a random scramble with elliptic curve Ed25519.
+
+    Secret and public key are derived from password.
+    """
+    # variable names based on rfc8032 section-5.1.6
+    #
+    if not _nacl_bindings:
+        _init_nacl()
+
+    # h = SHA512(password)
+    h = hashlib.sha512(password).digest()
+
+    # s = prune(first_half(h))
+    s = _scalar_clamp(h[:32])
+
+    # r = SHA512(second_half(h) || M)
+    r = hashlib.sha512(h[32:] + scramble).digest()
+
+    # R = encoded point [r]B
+    r = _nacl_bindings.crypto_core_ed25519_scalar_reduce(r)
+    R = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(r)
+
+    # A = encoded point [s]B
+    A = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(s)
+
+    # k = SHA512(R || A || M)
+    k = hashlib.sha512(R + A + scramble).digest()
+
+    # S = (k * s + r) mod L
+    k = _nacl_bindings.crypto_core_ed25519_scalar_reduce(k)
+    ks = _nacl_bindings.crypto_core_ed25519_scalar_mul(k, s)
+    S = _nacl_bindings.crypto_core_ed25519_scalar_add(ks, r)
+
+    # signature = R || S
+    return R + S
+
+
+# sha256_password
+
+
+def _roundtrip(conn, send_data):
+    conn.write_packet(send_data)
+    pkt = conn._read_packet()
+    pkt.check_error()
+    return pkt
+
+
+def _xor_password(password, salt):
+    # Trailing NUL character will be added in Auth Switch Request.
+    # See https://github.com/mysql/mysql-server/blob/7d10c82196c8e45554f27c00681474a9fb86d137/sql/auth/sha2_password.cc#L939-L945
+    salt = salt[:SCRAMBLE_LENGTH]
+    password_bytes = bytearray(password)
+    # salt = bytearray(salt)  # for PY2 compat.
+    salt_len = len(salt)
+    for i in range(len(password_bytes)):
+        password_bytes[i] ^= salt[i % salt_len]
+    return bytes(password_bytes)
+
+
+def sha2_rsa_encrypt(password, salt, public_key):
+    """Encrypt password with salt and public_key.
+
+    Used for sha256_password and caching_sha2_password.
+    """
+    if not _have_cryptography:
+        raise RuntimeError(
+            "'cryptography' package is required for sha256_password or caching_sha2_password auth methods"
+        )
+    message = _xor_password(password + b"\0", salt)
+    rsa_key = serialization.load_pem_public_key(public_key, default_backend())
+    return rsa_key.encrypt(
+        message,
+        padding.OAEP(
+            mgf=padding.MGF1(algorithm=hashes.SHA1()),
+            algorithm=hashes.SHA1(),
+            label=None,
+        ),
+    )
+
+
+def sha256_password_auth(conn, pkt):
+    if conn._secure:
+        if DEBUG:
+            print("sha256: Sending plain password")
+        data = conn.password + b"\0"
+        return _roundtrip(conn, data)
+
+    if pkt.is_auth_switch_request():
+        conn.salt = pkt.read_all()
+        if not conn.server_public_key and conn.password:
+            # Request server public key
+            if DEBUG:
+                print("sha256: Requesting server public key")
+            pkt = _roundtrip(conn, b"\1")
+
+    if pkt.is_extra_auth_data():
+        conn.server_public_key = pkt._data[1:]
+        if DEBUG:
+            print("Received public key:\n", conn.server_public_key.decode("ascii"))
+
+    if conn.password:
+        if not conn.server_public_key:
+            raise OperationalError("Couldn't receive server's public key")
+
+        data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+    else:
+        data = b""
+
+    return _roundtrip(conn, data)
+
+
+def scramble_caching_sha2(password, nonce):
+    # (bytes, bytes) -> bytes
+    """Scramble algorithm used in cached_sha2_password fast path.
+
+    XOR(SHA256(password), SHA256(SHA256(SHA256(password)), nonce))
+    """
+    if not password:
+        return b""
+
+    p1 = hashlib.sha256(password).digest()
+    p2 = hashlib.sha256(p1).digest()
+    p3 = hashlib.sha256(p2 + nonce).digest()
+
+    res = bytearray(p1)
+    for i in range(len(p3)):
+        res[i] ^= p3[i]
+
+    return bytes(res)
+
+
+def caching_sha2_password_auth(conn, pkt):
+    # No password fast path
+    if not conn.password:
+        return _roundtrip(conn, b"")
+
+    if pkt.is_auth_switch_request():
+        # Try from fast auth
+        if DEBUG:
+            print("caching sha2: Trying fast path")
+        conn.salt = pkt.read_all()
+        scrambled = scramble_caching_sha2(conn.password, conn.salt)
+        pkt = _roundtrip(conn, scrambled)
+    # else: fast auth is tried in initial handshake
+
+    if not pkt.is_extra_auth_data():
+        raise OperationalError(
+            "caching sha2: Unknown packet for fast auth: %s" % pkt._data[:1]
+        )
+
+    # magic numbers:
+    # 2 - request public key
+    # 3 - fast auth succeeded
+    # 4 - need full auth
+
+    pkt.advance(1)
+    n = pkt.read_uint8()
+
+    if n == 3:
+        if DEBUG:
+            print("caching sha2: succeeded by fast path.")
+        pkt = conn._read_packet()
+        pkt.check_error()  # pkt must be OK packet
+        return pkt
+
+    if n != 4:
+        raise OperationalError("caching sha2: Unknwon result for fast auth: %s" % n)
+
+    if DEBUG:
+        print("caching sha2: Trying full auth...")
+
+    if conn._secure:
+        if DEBUG:
+            print("caching sha2: Sending plain password via secure connection")
+        return _roundtrip(conn, conn.password + b"\0")
+
+    if not conn.server_public_key:
+        pkt = _roundtrip(conn, b"\x02")  # Request public key
+        if not pkt.is_extra_auth_data():
+            raise OperationalError(
+                "caching sha2: Unknown packet for public key: %s" % pkt._data[:1]
+            )
+
+        conn.server_public_key = pkt._data[1:]
+        if DEBUG:
+            print(conn.server_public_key.decode("ascii"))
+
+    data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+    pkt = _roundtrip(conn, data)

+ 209 - 0
ambt-preden-dailymigration-dev/package/pymysql/charset.py

@@ -0,0 +1,209 @@
+MBLENGTH = {8: 1, 33: 3, 88: 2, 91: 2}
+
+
+class Charset:
+    def __init__(self, id, name, collation, is_default):
+        self.id, self.name, self.collation = id, name, collation
+        self.is_default = is_default == "Yes"
+
+    def __repr__(self):
+        return "Charset(id=%s, name=%r, collation=%r)" % (
+            self.id,
+            self.name,
+            self.collation,
+        )
+
+    @property
+    def encoding(self):
+        name = self.name
+        if name in ("utf8mb4", "utf8mb3"):
+            return "utf8"
+        if name == "latin1":
+            return "cp1252"
+        if name == "koi8r":
+            return "koi8_r"
+        if name == "koi8u":
+            return "koi8_u"
+        return name
+
+    @property
+    def is_binary(self):
+        return self.id == 63
+
+
+class Charsets:
+    def __init__(self):
+        self._by_id = {}
+        self._by_name = {}
+
+    def add(self, c):
+        self._by_id[c.id] = c
+        if c.is_default:
+            self._by_name[c.name] = c
+
+    def by_id(self, id):
+        return self._by_id[id]
+
+    def by_name(self, name):
+        return self._by_name.get(name.lower())
+
+
+_charsets = Charsets()
+"""
+Generated with:
+
+mysql -N -s -e "select id, character_set_name, collation_name, is_default
+from information_schema.collations order by id;" | python -c "import sys
+for l in sys.stdin.readlines():
+        id, name, collation, is_default  = l.split(chr(9))
+        print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \
+                % (id, name, collation, is_default.strip())
+"
+
+"""
+_charsets.add(Charset(1, "big5", "big5_chinese_ci", "Yes"))
+_charsets.add(Charset(2, "latin2", "latin2_czech_cs", ""))
+_charsets.add(Charset(3, "dec8", "dec8_swedish_ci", "Yes"))
+_charsets.add(Charset(4, "cp850", "cp850_general_ci", "Yes"))
+_charsets.add(Charset(5, "latin1", "latin1_german1_ci", ""))
+_charsets.add(Charset(6, "hp8", "hp8_english_ci", "Yes"))
+_charsets.add(Charset(7, "koi8r", "koi8r_general_ci", "Yes"))
+_charsets.add(Charset(8, "latin1", "latin1_swedish_ci", "Yes"))
+_charsets.add(Charset(9, "latin2", "latin2_general_ci", "Yes"))
+_charsets.add(Charset(10, "swe7", "swe7_swedish_ci", "Yes"))
+_charsets.add(Charset(11, "ascii", "ascii_general_ci", "Yes"))
+_charsets.add(Charset(12, "ujis", "ujis_japanese_ci", "Yes"))
+_charsets.add(Charset(13, "sjis", "sjis_japanese_ci", "Yes"))
+_charsets.add(Charset(14, "cp1251", "cp1251_bulgarian_ci", ""))
+_charsets.add(Charset(15, "latin1", "latin1_danish_ci", ""))
+_charsets.add(Charset(16, "hebrew", "hebrew_general_ci", "Yes"))
+_charsets.add(Charset(18, "tis620", "tis620_thai_ci", "Yes"))
+_charsets.add(Charset(19, "euckr", "euckr_korean_ci", "Yes"))
+_charsets.add(Charset(20, "latin7", "latin7_estonian_cs", ""))
+_charsets.add(Charset(21, "latin2", "latin2_hungarian_ci", ""))
+_charsets.add(Charset(22, "koi8u", "koi8u_general_ci", "Yes"))
+_charsets.add(Charset(23, "cp1251", "cp1251_ukrainian_ci", ""))
+_charsets.add(Charset(24, "gb2312", "gb2312_chinese_ci", "Yes"))
+_charsets.add(Charset(25, "greek", "greek_general_ci", "Yes"))
+_charsets.add(Charset(26, "cp1250", "cp1250_general_ci", "Yes"))
+_charsets.add(Charset(27, "latin2", "latin2_croatian_ci", ""))
+_charsets.add(Charset(28, "gbk", "gbk_chinese_ci", "Yes"))
+_charsets.add(Charset(29, "cp1257", "cp1257_lithuanian_ci", ""))
+_charsets.add(Charset(30, "latin5", "latin5_turkish_ci", "Yes"))
+_charsets.add(Charset(31, "latin1", "latin1_german2_ci", ""))
+_charsets.add(Charset(32, "armscii8", "armscii8_general_ci", "Yes"))
+_charsets.add(Charset(33, "utf8", "utf8_general_ci", "Yes"))
+_charsets.add(Charset(34, "cp1250", "cp1250_czech_cs", ""))
+_charsets.add(Charset(36, "cp866", "cp866_general_ci", "Yes"))
+_charsets.add(Charset(37, "keybcs2", "keybcs2_general_ci", "Yes"))
+_charsets.add(Charset(38, "macce", "macce_general_ci", "Yes"))
+_charsets.add(Charset(39, "macroman", "macroman_general_ci", "Yes"))
+_charsets.add(Charset(40, "cp852", "cp852_general_ci", "Yes"))
+_charsets.add(Charset(41, "latin7", "latin7_general_ci", "Yes"))
+_charsets.add(Charset(42, "latin7", "latin7_general_cs", ""))
+_charsets.add(Charset(43, "macce", "macce_bin", ""))
+_charsets.add(Charset(44, "cp1250", "cp1250_croatian_ci", ""))
+_charsets.add(Charset(45, "utf8mb4", "utf8mb4_general_ci", "Yes"))
+_charsets.add(Charset(46, "utf8mb4", "utf8mb4_bin", ""))
+_charsets.add(Charset(47, "latin1", "latin1_bin", ""))
+_charsets.add(Charset(48, "latin1", "latin1_general_ci", ""))
+_charsets.add(Charset(49, "latin1", "latin1_general_cs", ""))
+_charsets.add(Charset(50, "cp1251", "cp1251_bin", ""))
+_charsets.add(Charset(51, "cp1251", "cp1251_general_ci", "Yes"))
+_charsets.add(Charset(52, "cp1251", "cp1251_general_cs", ""))
+_charsets.add(Charset(53, "macroman", "macroman_bin", ""))
+_charsets.add(Charset(57, "cp1256", "cp1256_general_ci", "Yes"))
+_charsets.add(Charset(58, "cp1257", "cp1257_bin", ""))
+_charsets.add(Charset(59, "cp1257", "cp1257_general_ci", "Yes"))
+_charsets.add(Charset(63, "binary", "binary", "Yes"))
+_charsets.add(Charset(64, "armscii8", "armscii8_bin", ""))
+_charsets.add(Charset(65, "ascii", "ascii_bin", ""))
+_charsets.add(Charset(66, "cp1250", "cp1250_bin", ""))
+_charsets.add(Charset(67, "cp1256", "cp1256_bin", ""))
+_charsets.add(Charset(68, "cp866", "cp866_bin", ""))
+_charsets.add(Charset(69, "dec8", "dec8_bin", ""))
+_charsets.add(Charset(70, "greek", "greek_bin", ""))
+_charsets.add(Charset(71, "hebrew", "hebrew_bin", ""))
+_charsets.add(Charset(72, "hp8", "hp8_bin", ""))
+_charsets.add(Charset(73, "keybcs2", "keybcs2_bin", ""))
+_charsets.add(Charset(74, "koi8r", "koi8r_bin", ""))
+_charsets.add(Charset(75, "koi8u", "koi8u_bin", ""))
+_charsets.add(Charset(76, "utf8", "utf8_tolower_ci", ""))
+_charsets.add(Charset(77, "latin2", "latin2_bin", ""))
+_charsets.add(Charset(78, "latin5", "latin5_bin", ""))
+_charsets.add(Charset(79, "latin7", "latin7_bin", ""))
+_charsets.add(Charset(80, "cp850", "cp850_bin", ""))
+_charsets.add(Charset(81, "cp852", "cp852_bin", ""))
+_charsets.add(Charset(82, "swe7", "swe7_bin", ""))
+_charsets.add(Charset(83, "utf8", "utf8_bin", ""))
+_charsets.add(Charset(84, "big5", "big5_bin", ""))
+_charsets.add(Charset(85, "euckr", "euckr_bin", ""))
+_charsets.add(Charset(86, "gb2312", "gb2312_bin", ""))
+_charsets.add(Charset(87, "gbk", "gbk_bin", ""))
+_charsets.add(Charset(88, "sjis", "sjis_bin", ""))
+_charsets.add(Charset(89, "tis620", "tis620_bin", ""))
+_charsets.add(Charset(91, "ujis", "ujis_bin", ""))
+_charsets.add(Charset(92, "geostd8", "geostd8_general_ci", "Yes"))
+_charsets.add(Charset(93, "geostd8", "geostd8_bin", ""))
+_charsets.add(Charset(94, "latin1", "latin1_spanish_ci", ""))
+_charsets.add(Charset(95, "cp932", "cp932_japanese_ci", "Yes"))
+_charsets.add(Charset(96, "cp932", "cp932_bin", ""))
+_charsets.add(Charset(97, "eucjpms", "eucjpms_japanese_ci", "Yes"))
+_charsets.add(Charset(98, "eucjpms", "eucjpms_bin", ""))
+_charsets.add(Charset(99, "cp1250", "cp1250_polish_ci", ""))
+_charsets.add(Charset(192, "utf8", "utf8_unicode_ci", ""))
+_charsets.add(Charset(193, "utf8", "utf8_icelandic_ci", ""))
+_charsets.add(Charset(194, "utf8", "utf8_latvian_ci", ""))
+_charsets.add(Charset(195, "utf8", "utf8_romanian_ci", ""))
+_charsets.add(Charset(196, "utf8", "utf8_slovenian_ci", ""))
+_charsets.add(Charset(197, "utf8", "utf8_polish_ci", ""))
+_charsets.add(Charset(198, "utf8", "utf8_estonian_ci", ""))
+_charsets.add(Charset(199, "utf8", "utf8_spanish_ci", ""))
+_charsets.add(Charset(200, "utf8", "utf8_swedish_ci", ""))
+_charsets.add(Charset(201, "utf8", "utf8_turkish_ci", ""))
+_charsets.add(Charset(202, "utf8", "utf8_czech_ci", ""))
+_charsets.add(Charset(203, "utf8", "utf8_danish_ci", ""))
+_charsets.add(Charset(204, "utf8", "utf8_lithuanian_ci", ""))
+_charsets.add(Charset(205, "utf8", "utf8_slovak_ci", ""))
+_charsets.add(Charset(206, "utf8", "utf8_spanish2_ci", ""))
+_charsets.add(Charset(207, "utf8", "utf8_roman_ci", ""))
+_charsets.add(Charset(208, "utf8", "utf8_persian_ci", ""))
+_charsets.add(Charset(209, "utf8", "utf8_esperanto_ci", ""))
+_charsets.add(Charset(210, "utf8", "utf8_hungarian_ci", ""))
+_charsets.add(Charset(211, "utf8", "utf8_sinhala_ci", ""))
+_charsets.add(Charset(212, "utf8", "utf8_german2_ci", ""))
+_charsets.add(Charset(213, "utf8", "utf8_croatian_ci", ""))
+_charsets.add(Charset(214, "utf8", "utf8_unicode_520_ci", ""))
+_charsets.add(Charset(215, "utf8", "utf8_vietnamese_ci", ""))
+_charsets.add(Charset(223, "utf8", "utf8_general_mysql500_ci", ""))
+_charsets.add(Charset(224, "utf8mb4", "utf8mb4_unicode_ci", ""))
+_charsets.add(Charset(225, "utf8mb4", "utf8mb4_icelandic_ci", ""))
+_charsets.add(Charset(226, "utf8mb4", "utf8mb4_latvian_ci", ""))
+_charsets.add(Charset(227, "utf8mb4", "utf8mb4_romanian_ci", ""))
+_charsets.add(Charset(228, "utf8mb4", "utf8mb4_slovenian_ci", ""))
+_charsets.add(Charset(229, "utf8mb4", "utf8mb4_polish_ci", ""))
+_charsets.add(Charset(230, "utf8mb4", "utf8mb4_estonian_ci", ""))
+_charsets.add(Charset(231, "utf8mb4", "utf8mb4_spanish_ci", ""))
+_charsets.add(Charset(232, "utf8mb4", "utf8mb4_swedish_ci", ""))
+_charsets.add(Charset(233, "utf8mb4", "utf8mb4_turkish_ci", ""))
+_charsets.add(Charset(234, "utf8mb4", "utf8mb4_czech_ci", ""))
+_charsets.add(Charset(235, "utf8mb4", "utf8mb4_danish_ci", ""))
+_charsets.add(Charset(236, "utf8mb4", "utf8mb4_lithuanian_ci", ""))
+_charsets.add(Charset(237, "utf8mb4", "utf8mb4_slovak_ci", ""))
+_charsets.add(Charset(238, "utf8mb4", "utf8mb4_spanish2_ci", ""))
+_charsets.add(Charset(239, "utf8mb4", "utf8mb4_roman_ci", ""))
+_charsets.add(Charset(240, "utf8mb4", "utf8mb4_persian_ci", ""))
+_charsets.add(Charset(241, "utf8mb4", "utf8mb4_esperanto_ci", ""))
+_charsets.add(Charset(242, "utf8mb4", "utf8mb4_hungarian_ci", ""))
+_charsets.add(Charset(243, "utf8mb4", "utf8mb4_sinhala_ci", ""))
+_charsets.add(Charset(244, "utf8mb4", "utf8mb4_german2_ci", ""))
+_charsets.add(Charset(245, "utf8mb4", "utf8mb4_croatian_ci", ""))
+_charsets.add(Charset(246, "utf8mb4", "utf8mb4_unicode_520_ci", ""))
+_charsets.add(Charset(247, "utf8mb4", "utf8mb4_vietnamese_ci", ""))
+_charsets.add(Charset(248, "gb18030", "gb18030_chinese_ci", "Yes"))
+_charsets.add(Charset(249, "gb18030", "gb18030_bin", ""))
+_charsets.add(Charset(250, "gb18030", "gb18030_unicode_520_ci", ""))
+_charsets.add(Charset(255, "utf8mb4", "utf8mb4_0900_ai_ci", ""))
+
+charset_by_name = _charsets.by_name
+charset_by_id = _charsets.by_id

+ 1367 - 0
ambt-preden-dailymigration-dev/package/pymysql/connections.py

@@ -0,0 +1,1367 @@
+# Python implementation of the MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+# Error codes:
+# https://dev.mysql.com/doc/refman/5.5/en/error-handling.html
+import errno
+import os
+import socket
+import struct
+import sys
+import traceback
+import warnings
+
+from . import _auth
+
+from .charset import charset_by_name, charset_by_id
+from .constants import CLIENT, COMMAND, CR, FIELD_TYPE, SERVER_STATUS
+from . import converters
+from .cursors import Cursor
+from .optionfile import Parser
+from .protocol import (
+    dump_packet,
+    MysqlPacket,
+    FieldDescriptorPacket,
+    OKPacketWrapper,
+    EOFPacketWrapper,
+    LoadLocalPacketWrapper,
+)
+from . import err, VERSION_STRING
+
+try:
+    import ssl
+
+    SSL_ENABLED = True
+except ImportError:
+    ssl = None
+    SSL_ENABLED = False
+
+try:
+    import getpass
+
+    DEFAULT_USER = getpass.getuser()
+    del getpass
+except (ImportError, KeyError):
+    # KeyError occurs when there's no entry in OS database for a current user.
+    DEFAULT_USER = None
+
+DEBUG = False
+
+TEXT_TYPES = {
+    FIELD_TYPE.BIT,
+    FIELD_TYPE.BLOB,
+    FIELD_TYPE.LONG_BLOB,
+    FIELD_TYPE.MEDIUM_BLOB,
+    FIELD_TYPE.STRING,
+    FIELD_TYPE.TINY_BLOB,
+    FIELD_TYPE.VAR_STRING,
+    FIELD_TYPE.VARCHAR,
+    FIELD_TYPE.GEOMETRY,
+}
+
+
+DEFAULT_CHARSET = "utf8mb4"
+
+MAX_PACKET_LEN = 2 ** 24 - 1
+
+
+def _pack_int24(n):
+    return struct.pack("<I", n)[:3]
+
+
+# https://dev.mysql.com/doc/internals/en/integer.html#packet-Protocol::LengthEncodedInteger
+def _lenenc_int(i):
+    if i < 0:
+        raise ValueError(
+            "Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i
+        )
+    elif i < 0xFB:
+        return bytes([i])
+    elif i < (1 << 16):
+        return b"\xfc" + struct.pack("<H", i)
+    elif i < (1 << 24):
+        return b"\xfd" + struct.pack("<I", i)[:3]
+    elif i < (1 << 64):
+        return b"\xfe" + struct.pack("<Q", i)
+    else:
+        raise ValueError(
+            "Encoding %x is larger than %x - no representation in LengthEncodedInteger"
+            % (i, (1 << 64))
+        )
+
+
+class Connection:
+    """
+    Representation of a socket with a mysql server.
+
+    The proper way to get an instance of this class is to call
+    connect().
+
+    Establish a connection to the MySQL database. Accepts several
+    arguments:
+
+    :param host: Host where the database server is located
+    :param user: Username to log in as
+    :param password: Password to use.
+    :param database: Database to use, None to not use a particular one.
+    :param port: MySQL port to use, default is usually OK. (default: 3306)
+    :param bind_address: When the client has multiple network interfaces, specify
+        the interface from which to connect to the host. Argument can be
+        a hostname or an IP address.
+    :param unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
+    :param read_timeout: The timeout for reading from the connection in seconds (default: None - no timeout)
+    :param write_timeout: The timeout for writing to the connection in seconds (default: None - no timeout)
+    :param charset: Charset you want to use.
+    :param sql_mode: Default SQL_MODE to use.
+    :param read_default_file:
+        Specifies  my.cnf file to read these parameters from under the [client] section.
+    :param conv:
+        Conversion dictionary to use instead of the default one.
+        This is used to provide custom marshalling and unmarshalling of types.
+        See converters.
+    :param use_unicode:
+        Whether or not to default to unicode strings.
+        This option defaults to true.
+    :param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
+    :param cursorclass: Custom cursor class to use.
+    :param init_command: Initial SQL statement to run when connection is established.
+    :param connect_timeout: Timeout before throwing an exception when connecting.
+        (default: 10, min: 1, max: 31536000)
+    :param ssl:
+        A dict of arguments similar to mysql_ssl_set()'s parameters.
+    :param ssl_ca: Path to the file that contains a PEM-formatted CA certificate
+    :param ssl_cert: Path to the file that contains a PEM-formatted client certificate
+    :param ssl_disabled: A boolean value that disables usage of TLS
+    :param ssl_key: Path to the file that contains a PEM-formatted private key for the client certificate
+    :param ssl_verify_cert: Set to true to check the validity of server certificates
+    :param ssl_verify_identity: Set to true to check the server's identity
+    :param read_default_group: Group to read from in the configuration file.
+    :param autocommit: Autocommit mode. None means use server default. (default: False)
+    :param local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
+    :param max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB)
+        Only used to limit size of "LOAD LOCAL INFILE" data packet smaller than default (16KB).
+    :param defer_connect: Don't explicitly connect on construction - wait for connect call.
+        (default: False)
+    :param auth_plugin_map: A dict of plugin names to a class that processes that plugin.
+        The class will take the Connection object as the argument to the constructor.
+        The class needs an authenticate method taking an authentication packet as
+        an argument.  For the dialog plugin, a prompt(echo, prompt) method can be used
+        (if no authenticate method) for returning a string from the user. (experimental)
+    :param server_public_key: SHA256 authentication plugin public key value. (default: None)
+    :param binary_prefix: Add _binary prefix on bytes and bytearray. (default: False)
+    :param compress: Not supported
+    :param named_pipe: Not supported
+    :param db: **DEPRECATED** Alias for database.
+    :param passwd: **DEPRECATED** Alias for password.
+
+    See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_ in the
+    specification.
+    """
+
+    _sock = None
+    _auth_plugin_name = ""
+    _closed = False
+    _secure = False
+
+    def __init__(
+        self,
+        *,
+        user=None,  # The first four arguments is based on DB-API 2.0 recommendation.
+        password="",
+        host=None,
+        database=None,
+        unix_socket=None,
+        port=0,
+        charset="",
+        sql_mode=None,
+        read_default_file=None,
+        conv=None,
+        use_unicode=True,
+        client_flag=0,
+        cursorclass=Cursor,
+        init_command=None,
+        connect_timeout=10,
+        read_default_group=None,
+        autocommit=False,
+        local_infile=False,
+        max_allowed_packet=16 * 1024 * 1024,
+        defer_connect=False,
+        auth_plugin_map=None,
+        read_timeout=None,
+        write_timeout=None,
+        bind_address=None,
+        binary_prefix=False,
+        program_name=None,
+        server_public_key=None,
+        ssl=None,
+        ssl_ca=None,
+        ssl_cert=None,
+        ssl_disabled=None,
+        ssl_key=None,
+        ssl_verify_cert=None,
+        ssl_verify_identity=None,
+        compress=None,  # not supported
+        named_pipe=None,  # not supported
+        passwd=None,  # deprecated
+        db=None,  # deprecated
+    ):
+        if db is not None and database is None:
+            # We will raise warining in 2022 or later.
+            # See https://github.com/PyMySQL/PyMySQL/issues/939
+            # warnings.warn("'db' is deprecated, use 'database'", DeprecationWarning, 3)
+            database = db
+        if passwd is not None and not password:
+            # We will raise warining in 2022 or later.
+            # See https://github.com/PyMySQL/PyMySQL/issues/939
+            # warnings.warn(
+            #    "'passwd' is deprecated, use 'password'", DeprecationWarning, 3
+            # )
+            password = passwd
+
+        if compress or named_pipe:
+            raise NotImplementedError(
+                "compress and named_pipe arguments are not supported"
+            )
+
+        self._local_infile = bool(local_infile)
+        if self._local_infile:
+            client_flag |= CLIENT.LOCAL_FILES
+
+        if read_default_group and not read_default_file:
+            if sys.platform.startswith("win"):
+                read_default_file = "c:\\my.ini"
+            else:
+                read_default_file = "/etc/my.cnf"
+
+        if read_default_file:
+            if not read_default_group:
+                read_default_group = "client"
+
+            cfg = Parser()
+            cfg.read(os.path.expanduser(read_default_file))
+
+            def _config(key, arg):
+                if arg:
+                    return arg
+                try:
+                    return cfg.get(read_default_group, key)
+                except Exception:
+                    return arg
+
+            user = _config("user", user)
+            password = _config("password", password)
+            host = _config("host", host)
+            database = _config("database", database)
+            unix_socket = _config("socket", unix_socket)
+            port = int(_config("port", port))
+            bind_address = _config("bind-address", bind_address)
+            charset = _config("default-character-set", charset)
+            if not ssl:
+                ssl = {}
+            if isinstance(ssl, dict):
+                for key in ["ca", "capath", "cert", "key", "cipher"]:
+                    value = _config("ssl-" + key, ssl.get(key))
+                    if value:
+                        ssl[key] = value
+
+        self.ssl = False
+        if not ssl_disabled:
+            if ssl_ca or ssl_cert or ssl_key or ssl_verify_cert or ssl_verify_identity:
+                ssl = {
+                    "ca": ssl_ca,
+                    "check_hostname": bool(ssl_verify_identity),
+                    "verify_mode": ssl_verify_cert
+                    if ssl_verify_cert is not None
+                    else False,
+                }
+                if ssl_cert is not None:
+                    ssl["cert"] = ssl_cert
+                if ssl_key is not None:
+                    ssl["key"] = ssl_key
+            if ssl:
+                if not SSL_ENABLED:
+                    raise NotImplementedError("ssl module not found")
+                self.ssl = True
+                client_flag |= CLIENT.SSL
+                self.ctx = self._create_ssl_ctx(ssl)
+
+        self.host = host or "localhost"
+        self.port = port or 3306
+        if type(self.port) is not int:
+            raise ValueError("port should be of type int")
+        self.user = user or DEFAULT_USER
+        self.password = password or b""
+        if isinstance(self.password, str):
+            self.password = self.password.encode("latin1")
+        self.db = database
+        self.unix_socket = unix_socket
+        self.bind_address = bind_address
+        if not (0 < connect_timeout <= 31536000):
+            raise ValueError("connect_timeout should be >0 and <=31536000")
+        self.connect_timeout = connect_timeout or None
+        if read_timeout is not None and read_timeout <= 0:
+            raise ValueError("read_timeout should be > 0")
+        self._read_timeout = read_timeout
+        if write_timeout is not None and write_timeout <= 0:
+            raise ValueError("write_timeout should be > 0")
+        self._write_timeout = write_timeout
+
+        self.charset = charset or DEFAULT_CHARSET
+        self.use_unicode = use_unicode
+
+        self.encoding = charset_by_name(self.charset).encoding
+
+        client_flag |= CLIENT.CAPABILITIES
+        if self.db:
+            client_flag |= CLIENT.CONNECT_WITH_DB
+
+        self.client_flag = client_flag
+
+        self.cursorclass = cursorclass
+
+        self._result = None
+        self._affected_rows = 0
+        self.host_info = "Not connected"
+
+        # specified autocommit mode. None means use server default.
+        self.autocommit_mode = autocommit
+
+        if conv is None:
+            conv = converters.conversions
+
+        # Needed for MySQLdb compatibility.
+        self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
+        self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
+        self.sql_mode = sql_mode
+        self.init_command = init_command
+        self.max_allowed_packet = max_allowed_packet
+        self._auth_plugin_map = auth_plugin_map or {}
+        self._binary_prefix = binary_prefix
+        self.server_public_key = server_public_key
+
+        self._connect_attrs = {
+            "_client_name": "pymysql",
+            "_pid": str(os.getpid()),
+            "_client_version": VERSION_STRING,
+        }
+
+        if program_name:
+            self._connect_attrs["program_name"] = program_name
+
+        if defer_connect:
+            self._sock = None
+        else:
+            self.connect()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        del exc_info
+        self.close()
+
+    def _create_ssl_ctx(self, sslp):
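+        # sslp is either a ready-made ssl.SSLContext or a dict assembled in
+        # __init__, e.g. {"ca": "/path/to/ca.pem", "cert": "...", "key": "..."}
+        # (the paths here are illustrative only).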
+        if isinstance(sslp, ssl.SSLContext):
+            return sslp
+        ca = sslp.get("ca")
+        capath = sslp.get("capath")
+        hasnoca = ca is None and capath is None
+        ctx = ssl.create_default_context(cafile=ca, capath=capath)
+        ctx.check_hostname = not hasnoca and sslp.get("check_hostname", True)
+        verify_mode_value = sslp.get("verify_mode")
+        if verify_mode_value is None:
+            ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+        elif isinstance(verify_mode_value, bool):
+            ctx.verify_mode = ssl.CERT_REQUIRED if verify_mode_value else ssl.CERT_NONE
+        else:
+            if isinstance(verify_mode_value, str):
+                verify_mode_value = verify_mode_value.lower()
+            if verify_mode_value in ("none", "0", "false", "no"):
+                ctx.verify_mode = ssl.CERT_NONE
+            elif verify_mode_value == "optional":
+                ctx.verify_mode = ssl.CERT_OPTIONAL
+            elif verify_mode_value in ("required", "1", "true", "yes"):
+                ctx.verify_mode = ssl.CERT_REQUIRED
+            else:
+                ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+        if "cert" in sslp:
+            ctx.load_cert_chain(sslp["cert"], keyfile=sslp.get("key"))
+        if "cipher" in sslp:
+            ctx.set_ciphers(sslp["cipher"])
+        ctx.options |= ssl.OP_NO_SSLv2
+        ctx.options |= ssl.OP_NO_SSLv3
+        return ctx
+
+    def close(self):
+        """
+        Send the quit message and close the socket.
+
+        See `Connection.close() <https://www.python.org/dev/peps/pep-0249/#Connection.close>`_
+        in the specification.
+
+        :raise Error: If the connection is already closed.
+        """
+        if self._closed:
+            raise err.Error("Already closed")
+        self._closed = True
+        if self._sock is None:
+            return
+        send_data = struct.pack("<iB", 1, COMMAND.COM_QUIT)
+        try:
+            self._write_bytes(send_data)
+        except Exception:
+            pass
+        finally:
+            self._force_close()
+
+    @property
+    def open(self):
+        """Return True if the connection is open"""
+        return self._sock is not None
+
+    def _force_close(self):
+        """Close connection without QUIT message"""
+        if self._sock:
+            try:
+                self._sock.close()
+            except:  # noqa
+                pass
+        self._sock = None
+        self._rfile = None
+
+    __del__ = _force_close
+
+    def autocommit(self, value):
+        self.autocommit_mode = bool(value)
+        current = self.get_autocommit()
+        if value != current:
+            self._send_autocommit_mode()
+
+    def get_autocommit(self):
+        return bool(self.server_status & SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
+
+    def _read_ok_packet(self):
+        pkt = self._read_packet()
+        if not pkt.is_ok_packet():
+            raise err.OperationalError(2014, "Command Out of Sync")
+        ok = OKPacketWrapper(pkt)
+        self.server_status = ok.server_status
+        return ok
+
+    def _send_autocommit_mode(self):
+        """Set whether or not to commit after every execute()"""
+        self._execute_command(
+            COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" % self.escape(self.autocommit_mode)
+        )
+        self._read_ok_packet()
+
+    def begin(self):
+        """Begin transaction."""
+        self._execute_command(COMMAND.COM_QUERY, "BEGIN")
+        self._read_ok_packet()
+
+    def commit(self):
+        """
+        Commit changes to stable storage.
+
+        See `Connection.commit() <https://www.python.org/dev/peps/pep-0249/#commit>`_
+        in the specification.
+        """
+        self._execute_command(COMMAND.COM_QUERY, "COMMIT")
+        self._read_ok_packet()
+
+    def rollback(self):
+        """
+        Roll back the current transaction.
+
+        See `Connection.rollback() <https://www.python.org/dev/peps/pep-0249/#rollback>`_
+        in the specification.
+        """
+        self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
+        self._read_ok_packet()
+
+    def show_warnings(self):
+        """Send the "SHOW WARNINGS" SQL command."""
+        self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
+        result = MySQLResult(self)
+        result.read()
+        return result.rows
+
+    def select_db(self, db):
+        """
+        Set current db.
+
+        :param db: The name of the db.
+        """
+        self._execute_command(COMMAND.COM_INIT_DB, db)
+        self._read_ok_packet()
+
+    def escape(self, obj, mapping=None):
+        """Escape whatever value you pass to it.
+
+        Non-standard, for internal use; do not use this in your applications.
+        """
+        if isinstance(obj, str):
+            return "'" + self.escape_string(obj) + "'"
+        if isinstance(obj, (bytes, bytearray)):
+            ret = self._quote_bytes(obj)
+            if self._binary_prefix:
+                ret = "_binary" + ret
+            return ret
+        return converters.escape_item(obj, self.charset, mapping=mapping)
+
+    def literal(self, obj):
+        """Alias for escape()
+
+        Non-standard, for internal use; do not use this in your applications.
+        """
+        return self.escape(obj, self.encoders)
+
+    def escape_string(self, s):
+        if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+            return s.replace("'", "''")
+        return converters.escape_string(s)
+
+    def _quote_bytes(self, s):
+        if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+            return "'%s'" % (s.replace(b"'", b"''").decode("ascii", "surrogateescape"),)
+        return converters.escape_bytes(s)
+
+    def cursor(self, cursor=None):
+        """
+        Create a new cursor to execute queries with.
+
+        :param cursor: The type of cursor to create; one of :py:class:`Cursor`,
+            :py:class:`SSCursor`, :py:class:`DictCursor`, or :py:class:`SSDictCursor`.
+            None means use Cursor.
+        """
+        if cursor:
+            return cursor(self)
+        return self.cursorclass(self)
+
+    # The following methods are INTERNAL USE ONLY (called from Cursor)
+    def query(self, sql, unbuffered=False):
+        # if DEBUG:
+        #     print("DEBUG: sending query:", sql)
+        if isinstance(sql, str):
+            sql = sql.encode(self.encoding, "surrogateescape")
+        self._execute_command(COMMAND.COM_QUERY, sql)
+        self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+        return self._affected_rows
+
+    def next_result(self, unbuffered=False):
+        self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+        return self._affected_rows
+
+    def affected_rows(self):
+        return self._affected_rows
+
+    def kill(self, thread_id):
+        arg = struct.pack("<I", thread_id)
+        self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
+        return self._read_ok_packet()
+
+    def ping(self, reconnect=True):
+        """
+        Check if the server is alive.
+
+        :param reconnect: If the connection is closed, reconnect.
+        :raise Error: If the connection is closed and reconnect=False.
+        """
+        if self._sock is None:
+            if reconnect:
+                self.connect()
+                reconnect = False
+            else:
+                raise err.Error("Already closed")
+        try:
+            self._execute_command(COMMAND.COM_PING, "")
+            self._read_ok_packet()
+        except Exception:
+            if reconnect:
+                self.connect()
+                self.ping(False)
+            else:
+                raise
+
+    def set_charset(self, charset):
+        # Make sure charset is supported.
+        encoding = charset_by_name(charset).encoding
+
+        self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s" % self.escape(charset))
+        self._read_packet()
+        self.charset = charset
+        self.encoding = encoding
+
+    def connect(self, sock=None):
+        self._closed = False
+        try:
+            if sock is None:
+                if self.unix_socket:
+                    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+                    sock.settimeout(self.connect_timeout)
+                    sock.connect(self.unix_socket)
+                    self.host_info = "Localhost via UNIX socket"
+                    self._secure = True
+                    if DEBUG:
+                        print("connected using unix_socket")
+                else:
+                    kwargs = {}
+                    if self.bind_address is not None:
+                        kwargs["source_address"] = (self.bind_address, 0)
+                    while True:
+                        try:
+                            sock = socket.create_connection(
+                                (self.host, self.port), self.connect_timeout, **kwargs
+                            )
+                            break
+                        except (OSError, IOError) as e:
+                            if e.errno == errno.EINTR:
+                                continue
+                            raise
+                    self.host_info = "socket %s:%d" % (self.host, self.port)
+                    if DEBUG:
+                        print("connected using socket")
+                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                sock.settimeout(None)
+
+            self._sock = sock
+            self._rfile = sock.makefile("rb")
+            self._next_seq_id = 0
+
+            self._get_server_information()
+            self._request_authentication()
+
+            if self.sql_mode is not None:
+                c = self.cursor()
+                c.execute("SET sql_mode=%s", (self.sql_mode,))
+
+            if self.init_command is not None:
+                c = self.cursor()
+                c.execute(self.init_command)
+                c.close()
+                self.commit()
+
+            if self.autocommit_mode is not None:
+                self.autocommit(self.autocommit_mode)
+        except BaseException as e:
+            self._rfile = None
+            if sock is not None:
+                try:
+                    sock.close()
+                except:  # noqa
+                    pass
+
+            if isinstance(e, (OSError, IOError, socket.error)):
+                exc = err.OperationalError(
+                    2003, "Can't connect to MySQL server on %r (%s)" % (self.host, e)
+                )
+                # Keep original exception and traceback to investigate error.
+                exc.original_exception = e
+                exc.traceback = traceback.format_exc()
+                if DEBUG:
+                    print(exc.traceback)
+                raise exc
+
+            # If e is neither a DatabaseError nor an IOError, it's a bug,
+            # but raising AssertionError would hide the original error,
+            # so just re-raise it.
+            raise
+
+    def write_packet(self, payload):
+        """Writes an entire "mysql packet" in its entirety to the network
+        adding its length and sequence number.
+        """
+        # Internal note: when you build packet manually and calls _write_bytes()
+        # directly, you should set self._next_seq_id properly.
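+        # Wire framing: a 3-byte little-endian payload length followed by a
+        # 1-byte sequence id, e.g. a 5-byte payload with seq id 0 goes out as
+        # b"\x05\x00\x00" + b"\x00" + payload.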
+        data = _pack_int24(len(payload)) + bytes([self._next_seq_id]) + payload
+        if DEBUG:
+            dump_packet(data)
+        self._write_bytes(data)
+        self._next_seq_id = (self._next_seq_id + 1) % 256
+
+    def _read_packet(self, packet_type=MysqlPacket):
+        """Read an entire "mysql packet" in its entirety from the network
+        and return a MysqlPacket type that represents the results.
+
+        :raise OperationalError: If the connection to the MySQL server is lost.
+        :raise InternalError: If the packet sequence number is wrong.
+        """
+        buff = bytearray()
+        while True:
+            packet_header = self._read_bytes(4)
+            # if DEBUG: dump_packet(packet_header)
+
+            btrl, btrh, packet_number = struct.unpack("<HBB", packet_header)
+            bytes_to_read = btrl + (btrh << 16)
+            if packet_number != self._next_seq_id:
+                self._force_close()
+                if packet_number == 0:
+                    # MariaDB sends error packet with seqno==0 when shutdown
+                    raise err.OperationalError(
+                        CR.CR_SERVER_LOST,
+                        "Lost connection to MySQL server during query",
+                    )
+                raise err.InternalError(
+                    "Packet sequence number wrong - got %d expected %d"
+                    % (packet_number, self._next_seq_id)
+                )
+            self._next_seq_id = (self._next_seq_id + 1) % 256
+
+            recv_data = self._read_bytes(bytes_to_read)
+            if DEBUG:
+                dump_packet(recv_data)
+            buff += recv_data
+            # https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
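+            # A length of 0xFFFFFF means the payload continues in the next
+            # packet; the final packet of the sequence is shorter than that
+            # (possibly zero-length).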
+            if bytes_to_read == 0xFFFFFF:
+                continue
+            if bytes_to_read < MAX_PACKET_LEN:
+                break
+
+        packet = packet_type(bytes(buff), self.encoding)
+        if packet.is_error_packet():
+            if self._result is not None and self._result.unbuffered_active is True:
+                self._result.unbuffered_active = False
+            packet.raise_for_error()
+        return packet
+
+    def _read_bytes(self, num_bytes):
+        self._sock.settimeout(self._read_timeout)
+        while True:
+            try:
+                data = self._rfile.read(num_bytes)
+                break
+            except (IOError, OSError) as e:
+                if e.errno == errno.EINTR:
+                    continue
+                self._force_close()
+                raise err.OperationalError(
+                    CR.CR_SERVER_LOST,
+                    "Lost connection to MySQL server during query (%s)" % (e,),
+                )
+            except BaseException:
+                # Don't convert unknown exception to MySQLError.
+                self._force_close()
+                raise
+        if len(data) < num_bytes:
+            self._force_close()
+            raise err.OperationalError(
+                CR.CR_SERVER_LOST, "Lost connection to MySQL server during query"
+            )
+        return data
+
+    def _write_bytes(self, data):
+        self._sock.settimeout(self._write_timeout)
+        try:
+            self._sock.sendall(data)
+        except IOError as e:
+            self._force_close()
+            raise err.OperationalError(
+                CR.CR_SERVER_GONE_ERROR, "MySQL server has gone away (%r)" % (e,)
+            )
+
+    def _read_query_result(self, unbuffered=False):
+        self._result = None
+        if unbuffered:
+            try:
+                result = MySQLResult(self)
+                result.init_unbuffered_query()
+            except:
+                result.unbuffered_active = False
+                result.connection = None
+                raise
+        else:
+            result = MySQLResult(self)
+            result.read()
+        self._result = result
+        if result.server_status is not None:
+            self.server_status = result.server_status
+        return result.affected_rows
+
+    def insert_id(self):
+        if self._result:
+            return self._result.insert_id
+        else:
+            return 0
+
+    def _execute_command(self, command, sql):
+        """
+        :raise InterfaceError: If the connection is closed.
+        :raise ValueError: If no username was specified.
+        """
+        if not self._sock:
+            raise err.InterfaceError(0, "")
+
+        # If the last query was unbuffered, make sure it finishes before
+        # sending new commands
+        if self._result is not None:
+            if self._result.unbuffered_active:
+                warnings.warn("Previous unbuffered result was left incomplete")
+                self._result._finish_unbuffered_query()
+            while self._result.has_next:
+                self.next_result()
+            self._result = None
+
+        if isinstance(sql, str):
+            sql = sql.encode(self.encoding)
+
+        packet_size = min(MAX_PACKET_LEN, len(sql) + 1)  # +1 is for command
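+        # The COM_QUERY payload is the 1-byte command followed by the SQL
+        # text; SQL longer than MAX_PACKET_LEN - 1 bytes is continued in the
+        # follow-up packets written below.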
+
+        # tiny optimization: build the first packet manually instead of
+        # calling self.write_packet()
+        prelude = struct.pack("<iB", packet_size, command)
+        packet = prelude + sql[: packet_size - 1]
+        self._write_bytes(packet)
+        if DEBUG:
+            dump_packet(packet)
+        self._next_seq_id = 1
+
+        if packet_size < MAX_PACKET_LEN:
+            return
+
+        sql = sql[packet_size - 1 :]
+        while True:
+            packet_size = min(MAX_PACKET_LEN, len(sql))
+            self.write_packet(sql[:packet_size])
+            sql = sql[packet_size:]
+            if not sql and packet_size < MAX_PACKET_LEN:
+                break
+
+    def _request_authentication(self):
+        # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
+        if int(self.server_version.split(".", 1)[0]) >= 5:
+            self.client_flag |= CLIENT.MULTI_RESULTS
+
+        if self.user is None:
+            raise ValueError("Did not specify a username")
+
+        charset_id = charset_by_name(self.charset).id
+        if isinstance(self.user, str):
+            self.user = self.user.encode(self.encoding)
+
+        data_init = struct.pack(
+            "<iIB23s", self.client_flag, MAX_PACKET_LEN, charset_id, b""
+        )
+
+        if self.ssl and self.server_capabilities & CLIENT.SSL:
+            self.write_packet(data_init)
+
+            self._sock = self.ctx.wrap_socket(self._sock, server_hostname=self.host)
+            self._rfile = self._sock.makefile("rb")
+            self._secure = True
+
+        data = data_init + self.user + b"\0"
+
+        authresp = b""
+        plugin_name = None
+
+        if self._auth_plugin_name == "":
+            plugin_name = b""
+            authresp = _auth.scramble_native_password(self.password, self.salt)
+        elif self._auth_plugin_name == "mysql_native_password":
+            plugin_name = b"mysql_native_password"
+            authresp = _auth.scramble_native_password(self.password, self.salt)
+        elif self._auth_plugin_name == "caching_sha2_password":
+            plugin_name = b"caching_sha2_password"
+            if self.password:
+                if DEBUG:
+                    print("caching_sha2: trying fast path")
+                authresp = _auth.scramble_caching_sha2(self.password, self.salt)
+            else:
+                if DEBUG:
+                    print("caching_sha2: empty password")
+        elif self._auth_plugin_name == "sha256_password":
+            plugin_name = b"sha256_password"
+            if self.ssl and self.server_capabilities & CLIENT.SSL:
+                authresp = self.password + b"\0"
+            elif self.password:
+                authresp = b"\1"  # request public key
+            else:
+                authresp = b"\0"  # empty password
+
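+        # The auth response is length-prefixed: as a length-encoded integer
+        # (one byte for lengths below 0xFB, otherwise a 0xFC/0xFD/0xFE marker
+        # plus 2/3/8 little-endian bytes) when the server supports it,
+        # otherwise as a single length byte.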
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
+            data += _lenenc_int(len(authresp)) + authresp
+        elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
+            data += struct.pack("B", len(authresp)) + authresp
+        else:  # pragma: no cover - not testing against servers without secure auth (>=5.0)
+            data += authresp + b"\0"
+
+        if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
+            if isinstance(self.db, str):
+                self.db = self.db.encode(self.encoding)
+            data += self.db + b"\0"
+
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH:
+            data += (plugin_name or b"") + b"\0"
+
+        if self.server_capabilities & CLIENT.CONNECT_ATTRS:
+            connect_attrs = b""
+            for k, v in self._connect_attrs.items():
+                k = k.encode("utf-8")
+                connect_attrs += struct.pack("B", len(k)) + k
+                v = v.encode("utf-8")
+                connect_attrs += struct.pack("B", len(v)) + v
+            data += struct.pack("B", len(connect_attrs)) + connect_attrs
+
+        self.write_packet(data)
+        auth_packet = self._read_packet()
+
+        # if the authentication method isn't accepted, the first byte of the
+        # response will be the octet 254 (an auth switch request)
+        if auth_packet.is_auth_switch_request():
+            if DEBUG:
+                print("received auth switch")
+            # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+            auth_packet.read_uint8()  # 0xfe packet identifier
+            plugin_name = auth_packet.read_string()
+            if (
+                self.server_capabilities & CLIENT.PLUGIN_AUTH
+                and plugin_name is not None
+            ):
+                auth_packet = self._process_auth(plugin_name, auth_packet)
+            else:
+                # send legacy handshake
+                data = _auth.scramble_old_password(self.password, self.salt) + b"\0"
+                self.write_packet(data)
+                auth_packet = self._read_packet()
+        elif auth_packet.is_extra_auth_data():
+            if DEBUG:
+                print("received extra data")
+            # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+            if self._auth_plugin_name == "caching_sha2_password":
+                auth_packet = _auth.caching_sha2_password_auth(self, auth_packet)
+            elif self._auth_plugin_name == "sha256_password":
+                auth_packet = _auth.sha256_password_auth(self, auth_packet)
+            else:
+                raise err.OperationalError(
+                    "Received extra packet for auth method %r", self._auth_plugin_name
+                )
+
+        if DEBUG:
+            print("Succeed to auth")
+
+    def _process_auth(self, plugin_name, auth_packet):
+        handler = self._get_auth_plugin_handler(plugin_name)
+        if handler:
+            try:
+                return handler.authenticate(auth_packet)
+            except AttributeError:
+                if plugin_name != b"dialog":
+                    raise err.OperationalError(
+                        2059,
+                        "Authentication plugin '%s'"
+                        " not loaded: - %r missing authenticate method"
+                        % (plugin_name, type(handler)),
+                    )
+        if plugin_name == b"caching_sha2_password":
+            return _auth.caching_sha2_password_auth(self, auth_packet)
+        elif plugin_name == b"sha256_password":
+            return _auth.sha256_password_auth(self, auth_packet)
+        elif plugin_name == b"mysql_native_password":
+            data = _auth.scramble_native_password(self.password, auth_packet.read_all())
+        elif plugin_name == b"client_ed25519":
+            data = _auth.ed25519_password(self.password, auth_packet.read_all())
+        elif plugin_name == b"mysql_old_password":
+            data = (
+                _auth.scramble_old_password(self.password, auth_packet.read_all())
+                + b"\0"
+            )
+        elif plugin_name == b"mysql_clear_password":
+            # https://dev.mysql.com/doc/internals/en/clear-text-authentication.html
+            data = self.password + b"\0"
+        elif plugin_name == b"dialog":
+            pkt = auth_packet
+            while True:
+                flag = pkt.read_uint8()
+                echo = (flag & 0x06) == 0x02
+                last = (flag & 0x01) == 0x01
+                prompt = pkt.read_all()
+
+                if prompt == b"Password: ":
+                    self.write_packet(self.password + b"\0")
+                elif handler:
+                    resp = "no response - TypeError within plugin.prompt method"
+                    try:
+                        resp = handler.prompt(echo, prompt)
+                        self.write_packet(resp + b"\0")
+                    except AttributeError:
+                        raise err.OperationalError(
+                            2059,
+                            "Authentication plugin '%s'"
+                            " not loaded: - %r missing prompt method"
+                            % (plugin_name, handler),
+                        )
+                    except TypeError:
+                        raise err.OperationalError(
+                            2061,
+                            "Authentication plugin '%s'"
+                            " %r didn't respond with string. Returned '%r' to prompt %r"
+                            % (plugin_name, handler, resp, prompt),
+                        )
+                else:
+                    raise err.OperationalError(
+                        2059,
+                        "Authentication plugin '%s' (%r) not configured"
+                        % (plugin_name, handler),
+                    )
+                pkt = self._read_packet()
+                pkt.check_error()
+                if pkt.is_ok_packet() or last:
+                    break
+            return pkt
+        else:
+            raise err.OperationalError(
+                2059, "Authentication plugin '%s' not configured" % plugin_name
+            )
+
+        self.write_packet(data)
+        pkt = self._read_packet()
+        pkt.check_error()
+        return pkt
+
+    def _get_auth_plugin_handler(self, plugin_name):
+        plugin_class = self._auth_plugin_map.get(plugin_name)
+        if not plugin_class and isinstance(plugin_name, bytes):
+            plugin_class = self._auth_plugin_map.get(plugin_name.decode("ascii"))
+        if plugin_class:
+            try:
+                handler = plugin_class(self)
+            except TypeError:
+                raise err.OperationalError(
+                    2059,
+                    "Authentication plugin '%s'"
+                    " not loaded: - %r cannot be constructed with connection object"
+                    % (plugin_name, plugin_class),
+                )
+        else:
+            handler = None
+        return handler
+
+    # _mysql support
+    def thread_id(self):
+        return self.server_thread_id[0]
+
+    def character_set_name(self):
+        return self.charset
+
+    def get_host_info(self):
+        return self.host_info
+
+    def get_proto_info(self):
+        return self.protocol_version
+
+    def _get_server_information(self):
+        i = 0
+        packet = self._read_packet()
+        data = packet.get_all_data()
+
+        self.protocol_version = data[i]
+        i += 1
+
+        server_end = data.find(b"\0", i)
+        self.server_version = data[i:server_end].decode("latin1")
+        i = server_end + 1
+
+        self.server_thread_id = struct.unpack("<I", data[i : i + 4])
+        i += 4
+
+        self.salt = data[i : i + 8]
+        i += 9  # 8 + 1(filler)
+
+        self.server_capabilities = struct.unpack("<H", data[i : i + 2])[0]
+        i += 2
+
+        if len(data) >= i + 6:
+            lang, stat, cap_h, salt_len = struct.unpack("<BHHB", data[i : i + 6])
+            i += 6
+            # TODO: deprecate server_language and server_charset.
+            # mysqlclient-python doesn't provide it.
+            self.server_language = lang
+            try:
+                self.server_charset = charset_by_id(lang).name
+            except KeyError:
+                # unknown collation
+                self.server_charset = None
+
+            self.server_status = stat
+            if DEBUG:
+                print("server_status: %x" % stat)
+
+            self.server_capabilities |= cap_h << 16
+            if DEBUG:
+                print("salt_len:", salt_len)
+            salt_len = max(12, salt_len - 9)
+
+        # reserved
+        i += 10
+
+        if len(data) >= i + salt_len:
+            # salt_len includes auth_plugin_data_part_1 and filler
+            self.salt += data[i : i + salt_len]
+            i += salt_len
+
+        i += 1
+        # AUTH PLUGIN NAME may appear here.
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
+            # Due to Bug#59453 the auth-plugin-name is missing the terminating
+            # NUL-char in versions prior to 5.5.10 and 5.6.2.
+            # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake
+            # we don't use a version check here because MariaDB has the fix
+            # but still reports a version earlier than those two.
+            server_end = data.find(b"\0", i)
+            if server_end < 0:  # pragma: no cover - very specific upstream bug
+                # not found \0 and last field so take it all
+                self._auth_plugin_name = data[i:].decode("utf-8")
+            else:
+                self._auth_plugin_name = data[i:server_end].decode("utf-8")
+
+    def get_server_info(self):
+        return self.server_version
+
+    Warning = err.Warning
+    Error = err.Error
+    InterfaceError = err.InterfaceError
+    DatabaseError = err.DatabaseError
+    DataError = err.DataError
+    OperationalError = err.OperationalError
+    IntegrityError = err.IntegrityError
+    InternalError = err.InternalError
+    ProgrammingError = err.ProgrammingError
+    NotSupportedError = err.NotSupportedError
+
+
+class MySQLResult:
+    def __init__(self, connection):
+        """
+        :type connection: Connection
+        """
+        self.connection = connection
+        self.affected_rows = None
+        self.insert_id = None
+        self.server_status = None
+        self.warning_count = 0
+        self.message = None
+        self.field_count = 0
+        self.description = None
+        self.rows = None
+        self.has_next = None
+        self.unbuffered_active = False
+
+    def __del__(self):
+        if self.unbuffered_active:
+            self._finish_unbuffered_query()
+
+    def read(self):
+        try:
+            first_packet = self.connection._read_packet()
+
+            if first_packet.is_ok_packet():
+                self._read_ok_packet(first_packet)
+            elif first_packet.is_load_local_packet():
+                self._read_load_local_packet(first_packet)
+            else:
+                self._read_result_packet(first_packet)
+        finally:
+            self.connection = None
+
+    def init_unbuffered_query(self):
+        """
+        :raise OperationalError: If the connection to the MySQL server is lost.
+        :raise InternalError:
+        """
+        self.unbuffered_active = True
+        first_packet = self.connection._read_packet()
+
+        if first_packet.is_ok_packet():
+            self._read_ok_packet(first_packet)
+            self.unbuffered_active = False
+            self.connection = None
+        elif first_packet.is_load_local_packet():
+            self._read_load_local_packet(first_packet)
+            self.unbuffered_active = False
+            self.connection = None
+        else:
+            self.field_count = first_packet.read_length_encoded_integer()
+            self._get_descriptions()
+
+            # Apparently, MySQLdb picks this number because it's the maximum
+            # value of a 64bit unsigned integer. Since we're emulating MySQLdb,
+            # we set it to this instead of None, which would be preferred.
+            self.affected_rows = 18446744073709551615
+
+    def _read_ok_packet(self, first_packet):
+        ok_packet = OKPacketWrapper(first_packet)
+        self.affected_rows = ok_packet.affected_rows
+        self.insert_id = ok_packet.insert_id
+        self.server_status = ok_packet.server_status
+        self.warning_count = ok_packet.warning_count
+        self.message = ok_packet.message
+        self.has_next = ok_packet.has_next
+
+    def _read_load_local_packet(self, first_packet):
+        if not self.connection._local_infile:
+            raise RuntimeError(
+                "**WARN**: Received LOAD_LOCAL packet but local_infile option is false."
+            )
+        load_packet = LoadLocalPacketWrapper(first_packet)
+        sender = LoadLocalFile(load_packet.filename, self.connection)
+        try:
+            sender.send_data()
+        except:
+            self.connection._read_packet()  # skip ok packet
+            raise
+
+        ok_packet = self.connection._read_packet()
+        if (
+            not ok_packet.is_ok_packet()
+        ):  # pragma: no cover - upstream induced protocol error
+            raise err.OperationalError(2014, "Commands Out of Sync")
+        self._read_ok_packet(ok_packet)
+
+    def _check_packet_is_eof(self, packet):
+        if not packet.is_eof_packet():
+            return False
+        # TODO: Support CLIENT.DEPRECATE_EOF
+        # 1) Add DEPRECATE_EOF to CAPABILITIES
+        # 2) Mask CAPABILITIES with server_capabilities
+        # 3) if server_capabilities & CLIENT.DEPRECATE_EOF: use OKPacketWrapper instead of EOFPacketWrapper
+        wp = EOFPacketWrapper(packet)
+        self.warning_count = wp.warning_count
+        self.has_next = wp.has_next
+        return True
+
+    def _read_result_packet(self, first_packet):
+        self.field_count = first_packet.read_length_encoded_integer()
+        self._get_descriptions()
+        self._read_rowdata_packet()
+
+    def _read_rowdata_packet_unbuffered(self):
+        # Check if in an active query
+        if not self.unbuffered_active:
+            return
+
+        # EOF
+        packet = self.connection._read_packet()
+        if self._check_packet_is_eof(packet):
+            self.unbuffered_active = False
+            self.connection = None
+            self.rows = None
+            return
+
+        row = self._read_row_from_packet(packet)
+        self.affected_rows = 1
+        self.rows = (row,)  # rows should be a tuple of rows for MySQL-python compatibility.
+        return row
+
+    def _finish_unbuffered_query(self):
+        # After much reading on the MySQL protocol, it appears that there is,
+        # in fact, no way to stop MySQL from sending all the data after
+        # executing a query, so we just spin, and wait for an EOF packet.
+        while self.unbuffered_active:
+            packet = self.connection._read_packet()
+            if self._check_packet_is_eof(packet):
+                self.unbuffered_active = False
+                self.connection = None  # release reference to kill cyclic reference.
+
+    def _read_rowdata_packet(self):
+        """Read a rowdata packet for each data row in the result set."""
+        rows = []
+        while True:
+            packet = self.connection._read_packet()
+            if self._check_packet_is_eof(packet):
+                self.connection = None  # release reference to kill cyclic reference.
+                break
+            rows.append(self._read_row_from_packet(packet))
+
+        self.affected_rows = len(rows)
+        self.rows = tuple(rows)
+
+    def _read_row_from_packet(self, packet):
+        row = []
+        for encoding, converter in self.converters:
+            try:
+                data = packet.read_length_coded_string()
+            except IndexError:
+                # No more columns in this row
+                # See https://github.com/PyMySQL/PyMySQL/pull/434
+                break
+            if data is not None:
+                if encoding is not None:
+                    data = data.decode(encoding)
+                if DEBUG:
+                    print("DEBUG: DATA = ", data)
+                if converter is not None:
+                    data = converter(data)
+            row.append(data)
+        return tuple(row)
+
+    def _get_descriptions(self):
+        """Read a column descriptor packet for each column in the result."""
+        self.fields = []
+        self.converters = []
+        use_unicode = self.connection.use_unicode
+        conn_encoding = self.connection.encoding
+        description = []
+
+        for i in range(self.field_count):
+            field = self.connection._read_packet(FieldDescriptorPacket)
+            self.fields.append(field)
+            description.append(field.description())
+            field_type = field.type_code
+            if use_unicode:
+                if field_type == FIELD_TYPE.JSON:
+                    # When SELECT from JSON column: charset = binary
+                    # When SELECT CAST(... AS JSON): charset = connection encoding
+                    # This behavior is different from TEXT / BLOB.
+                    # We should decode the result with the connection encoding regardless of charsetnr.
+                    # See https://github.com/PyMySQL/PyMySQL/issues/488
+                    encoding = conn_encoding  # SELECT CAST(... AS JSON)
+                elif field_type in TEXT_TYPES:
+                    if field.charsetnr == 63:  # binary
+                        # TEXTs with charset=binary means BINARY types.
+                        encoding = None
+                    else:
+                        encoding = conn_encoding
+                else:
+                    # Integers, Dates and Times, and other basic data are encoded in ascii
+                    encoding = "ascii"
+            else:
+                encoding = None
+            converter = self.connection.decoders.get(field_type)
+            if converter is converters.through:
+                converter = None
+            if DEBUG:
+                print(f"DEBUG: field={field}, converter={converter}")
+            self.converters.append((encoding, converter))
+
+        eof_packet = self.connection._read_packet()
+        assert eof_packet.is_eof_packet(), "Protocol error, expecting EOF"
+        self.description = tuple(description)
+
+
+class LoadLocalFile:
+    def __init__(self, filename, connection):
+        self.filename = filename
+        self.connection = connection
+
+    def send_data(self):
+        """Send data packets from the local file to the server"""
+        if not self.connection._sock:
+            raise err.InterfaceError(0, "")
+        conn = self.connection
+
+        try:
+            with open(self.filename, "rb") as open_file:
+                packet_size = min(
+                    conn.max_allowed_packet, 16 * 1024
+                )  # 16KB is efficient enough
+                while True:
+                    chunk = open_file.read(packet_size)
+                    if not chunk:
+                        break
+                    conn.write_packet(chunk)
+        except IOError:
+            raise err.OperationalError(1017, f"Can't find file '{self.filename}'")
+        finally:
+            # send the empty packet to signify we are done sending data
+            conn.write_packet(b"")

+ 38 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/CLIENT.py

@@ -0,0 +1,38 @@
+# https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags
+LONG_PASSWORD = 1
+FOUND_ROWS = 1 << 1
+LONG_FLAG = 1 << 2
+CONNECT_WITH_DB = 1 << 3
+NO_SCHEMA = 1 << 4
+COMPRESS = 1 << 5
+ODBC = 1 << 6
+LOCAL_FILES = 1 << 7
+IGNORE_SPACE = 1 << 8
+PROTOCOL_41 = 1 << 9
+INTERACTIVE = 1 << 10
+SSL = 1 << 11
+IGNORE_SIGPIPE = 1 << 12
+TRANSACTIONS = 1 << 13
+SECURE_CONNECTION = 1 << 15
+MULTI_STATEMENTS = 1 << 16
+MULTI_RESULTS = 1 << 17
+PS_MULTI_RESULTS = 1 << 18
+PLUGIN_AUTH = 1 << 19
+CONNECT_ATTRS = 1 << 20
+PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
+CAPABILITIES = (
+    LONG_PASSWORD
+    | LONG_FLAG
+    | PROTOCOL_41
+    | TRANSACTIONS
+    | SECURE_CONNECTION
+    | MULTI_RESULTS
+    | PLUGIN_AUTH
+    | PLUGIN_AUTH_LENENC_CLIENT_DATA
+    | CONNECT_ATTRS
+)
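+# A capability is tested with a bitwise AND, e.g. server_capabilities & PLUGIN_AUTH
+# is truthy when the server announces pluggable-auth support.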
+
+# Not done yet
+HANDLE_EXPIRED_PASSWORDS = 1 << 22
+SESSION_TRACK = 1 << 23
+DEPRECATE_EOF = 1 << 24

+ 32 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/COMMAND.py

@@ -0,0 +1,32 @@
+COM_SLEEP = 0x00
+COM_QUIT = 0x01
+COM_INIT_DB = 0x02
+COM_QUERY = 0x03
+COM_FIELD_LIST = 0x04
+COM_CREATE_DB = 0x05
+COM_DROP_DB = 0x06
+COM_REFRESH = 0x07
+COM_SHUTDOWN = 0x08
+COM_STATISTICS = 0x09
+COM_PROCESS_INFO = 0x0A
+COM_CONNECT = 0x0B
+COM_PROCESS_KILL = 0x0C
+COM_DEBUG = 0x0D
+COM_PING = 0x0E
+COM_TIME = 0x0F
+COM_DELAYED_INSERT = 0x10
+COM_CHANGE_USER = 0x11
+COM_BINLOG_DUMP = 0x12
+COM_TABLE_DUMP = 0x13
+COM_CONNECT_OUT = 0x14
+COM_REGISTER_SLAVE = 0x15
+COM_STMT_PREPARE = 0x16
+COM_STMT_EXECUTE = 0x17
+COM_STMT_SEND_LONG_DATA = 0x18
+COM_STMT_CLOSE = 0x19
+COM_STMT_RESET = 0x1A
+COM_SET_OPTION = 0x1B
+COM_STMT_FETCH = 0x1C
+COM_DAEMON = 0x1D
+COM_BINLOG_DUMP_GTID = 0x1E
+COM_END = 0x1F

+ 68 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/CR.py

@@ -0,0 +1,68 @@
+# flake8: noqa
+# errmsg.h
+CR_ERROR_FIRST = 2000
+CR_UNKNOWN_ERROR = 2000
+CR_SOCKET_CREATE_ERROR = 2001
+CR_CONNECTION_ERROR = 2002
+CR_CONN_HOST_ERROR = 2003
+CR_IPSOCK_ERROR = 2004
+CR_UNKNOWN_HOST = 2005
+CR_SERVER_GONE_ERROR = 2006
+CR_VERSION_ERROR = 2007
+CR_OUT_OF_MEMORY = 2008
+CR_WRONG_HOST_INFO = 2009
+CR_LOCALHOST_CONNECTION = 2010
+CR_TCP_CONNECTION = 2011
+CR_SERVER_HANDSHAKE_ERR = 2012
+CR_SERVER_LOST = 2013
+CR_COMMANDS_OUT_OF_SYNC = 2014
+CR_NAMEDPIPE_CONNECTION = 2015
+CR_NAMEDPIPEWAIT_ERROR = 2016
+CR_NAMEDPIPEOPEN_ERROR = 2017
+CR_NAMEDPIPESETSTATE_ERROR = 2018
+CR_CANT_READ_CHARSET = 2019
+CR_NET_PACKET_TOO_LARGE = 2020
+CR_EMBEDDED_CONNECTION = 2021
+CR_PROBE_SLAVE_STATUS = 2022
+CR_PROBE_SLAVE_HOSTS = 2023
+CR_PROBE_SLAVE_CONNECT = 2024
+CR_PROBE_MASTER_CONNECT = 2025
+CR_SSL_CONNECTION_ERROR = 2026
+CR_MALFORMED_PACKET = 2027
+CR_WRONG_LICENSE = 2028
+
+CR_NULL_POINTER = 2029
+CR_NO_PREPARE_STMT = 2030
+CR_PARAMS_NOT_BOUND = 2031
+CR_DATA_TRUNCATED = 2032
+CR_NO_PARAMETERS_EXISTS = 2033
+CR_INVALID_PARAMETER_NO = 2034
+CR_INVALID_BUFFER_USE = 2035
+CR_UNSUPPORTED_PARAM_TYPE = 2036
+
+CR_SHARED_MEMORY_CONNECTION = 2037
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
+CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
+CR_SHARED_MEMORY_MAP_ERROR = 2043
+CR_SHARED_MEMORY_EVENT_ERROR = 2044
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
+CR_CONN_UNKNOW_PROTOCOL = 2047
+CR_INVALID_CONN_HANDLE = 2048
+CR_SECURE_AUTH = 2049
+CR_FETCH_CANCELED = 2050
+CR_NO_DATA = 2051
+CR_NO_STMT_METADATA = 2052
+CR_NO_RESULT_SET = 2053
+CR_NOT_IMPLEMENTED = 2054
+CR_SERVER_LOST_EXTENDED = 2055
+CR_STMT_CLOSED = 2056
+CR_NEW_STMT_METADATA = 2057
+CR_ALREADY_CONNECTED = 2058
+CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
+CR_DUPLICATE_CONNECTION_ATTR = 2060
+CR_AUTH_PLUGIN_ERR = 2061
+CR_ERROR_LAST = 2061

+ 474 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/ER.py

@@ -0,0 +1,474 @@
+ERROR_FIRST = 1000
+HASHCHK = 1000
+NISAMCHK = 1001
+NO = 1002
+YES = 1003
+CANT_CREATE_FILE = 1004
+CANT_CREATE_TABLE = 1005
+CANT_CREATE_DB = 1006
+DB_CREATE_EXISTS = 1007
+DB_DROP_EXISTS = 1008
+DB_DROP_DELETE = 1009
+DB_DROP_RMDIR = 1010
+CANT_DELETE_FILE = 1011
+CANT_FIND_SYSTEM_REC = 1012
+CANT_GET_STAT = 1013
+CANT_GET_WD = 1014
+CANT_LOCK = 1015
+CANT_OPEN_FILE = 1016
+FILE_NOT_FOUND = 1017
+CANT_READ_DIR = 1018
+CANT_SET_WD = 1019
+CHECKREAD = 1020
+DISK_FULL = 1021
+DUP_KEY = 1022
+ERROR_ON_CLOSE = 1023
+ERROR_ON_READ = 1024
+ERROR_ON_RENAME = 1025
+ERROR_ON_WRITE = 1026
+FILE_USED = 1027
+FILSORT_ABORT = 1028
+FORM_NOT_FOUND = 1029
+GET_ERRNO = 1030
+ILLEGAL_HA = 1031
+KEY_NOT_FOUND = 1032
+NOT_FORM_FILE = 1033
+NOT_KEYFILE = 1034
+OLD_KEYFILE = 1035
+OPEN_AS_READONLY = 1036
+OUTOFMEMORY = 1037
+OUT_OF_SORTMEMORY = 1038
+UNEXPECTED_EOF = 1039
+CON_COUNT_ERROR = 1040
+OUT_OF_RESOURCES = 1041
+BAD_HOST_ERROR = 1042
+HANDSHAKE_ERROR = 1043
+DBACCESS_DENIED_ERROR = 1044
+ACCESS_DENIED_ERROR = 1045
+NO_DB_ERROR = 1046
+UNKNOWN_COM_ERROR = 1047
+BAD_NULL_ERROR = 1048
+BAD_DB_ERROR = 1049
+TABLE_EXISTS_ERROR = 1050
+BAD_TABLE_ERROR = 1051
+NON_UNIQ_ERROR = 1052
+SERVER_SHUTDOWN = 1053
+BAD_FIELD_ERROR = 1054
+WRONG_FIELD_WITH_GROUP = 1055
+WRONG_GROUP_FIELD = 1056
+WRONG_SUM_SELECT = 1057
+WRONG_VALUE_COUNT = 1058
+TOO_LONG_IDENT = 1059
+DUP_FIELDNAME = 1060
+DUP_KEYNAME = 1061
+DUP_ENTRY = 1062
+WRONG_FIELD_SPEC = 1063
+PARSE_ERROR = 1064
+EMPTY_QUERY = 1065
+NONUNIQ_TABLE = 1066
+INVALID_DEFAULT = 1067
+MULTIPLE_PRI_KEY = 1068
+TOO_MANY_KEYS = 1069
+TOO_MANY_KEY_PARTS = 1070
+TOO_LONG_KEY = 1071
+KEY_COLUMN_DOES_NOT_EXITS = 1072
+BLOB_USED_AS_KEY = 1073
+TOO_BIG_FIELDLENGTH = 1074
+WRONG_AUTO_KEY = 1075
+READY = 1076
+NORMAL_SHUTDOWN = 1077
+GOT_SIGNAL = 1078
+SHUTDOWN_COMPLETE = 1079
+FORCING_CLOSE = 1080
+IPSOCK_ERROR = 1081
+NO_SUCH_INDEX = 1082
+WRONG_FIELD_TERMINATORS = 1083
+BLOBS_AND_NO_TERMINATED = 1084
+TEXTFILE_NOT_READABLE = 1085
+FILE_EXISTS_ERROR = 1086
+LOAD_INFO = 1087
+ALTER_INFO = 1088
+WRONG_SUB_KEY = 1089
+CANT_REMOVE_ALL_FIELDS = 1090
+CANT_DROP_FIELD_OR_KEY = 1091
+INSERT_INFO = 1092
+UPDATE_TABLE_USED = 1093
+NO_SUCH_THREAD = 1094
+KILL_DENIED_ERROR = 1095
+NO_TABLES_USED = 1096
+TOO_BIG_SET = 1097
+NO_UNIQUE_LOGFILE = 1098
+TABLE_NOT_LOCKED_FOR_WRITE = 1099
+TABLE_NOT_LOCKED = 1100
+BLOB_CANT_HAVE_DEFAULT = 1101
+WRONG_DB_NAME = 1102
+WRONG_TABLE_NAME = 1103
+TOO_BIG_SELECT = 1104
+UNKNOWN_ERROR = 1105
+UNKNOWN_PROCEDURE = 1106
+WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
+WRONG_PARAMETERS_TO_PROCEDURE = 1108
+UNKNOWN_TABLE = 1109
+FIELD_SPECIFIED_TWICE = 1110
+INVALID_GROUP_FUNC_USE = 1111
+UNSUPPORTED_EXTENSION = 1112
+TABLE_MUST_HAVE_COLUMNS = 1113
+RECORD_FILE_FULL = 1114
+UNKNOWN_CHARACTER_SET = 1115
+TOO_MANY_TABLES = 1116
+TOO_MANY_FIELDS = 1117
+TOO_BIG_ROWSIZE = 1118
+STACK_OVERRUN = 1119
+WRONG_OUTER_JOIN = 1120
+NULL_COLUMN_IN_INDEX = 1121
+CANT_FIND_UDF = 1122
+CANT_INITIALIZE_UDF = 1123
+UDF_NO_PATHS = 1124
+UDF_EXISTS = 1125
+CANT_OPEN_LIBRARY = 1126
+CANT_FIND_DL_ENTRY = 1127
+FUNCTION_NOT_DEFINED = 1128
+HOST_IS_BLOCKED = 1129
+HOST_NOT_PRIVILEGED = 1130
+PASSWORD_ANONYMOUS_USER = 1131
+PASSWORD_NOT_ALLOWED = 1132
+PASSWORD_NO_MATCH = 1133
+UPDATE_INFO = 1134
+CANT_CREATE_THREAD = 1135
+WRONG_VALUE_COUNT_ON_ROW = 1136
+CANT_REOPEN_TABLE = 1137
+INVALID_USE_OF_NULL = 1138
+REGEXP_ERROR = 1139
+MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
+NONEXISTING_GRANT = 1141
+TABLEACCESS_DENIED_ERROR = 1142
+COLUMNACCESS_DENIED_ERROR = 1143
+ILLEGAL_GRANT_FOR_TABLE = 1144
+GRANT_WRONG_HOST_OR_USER = 1145
+NO_SUCH_TABLE = 1146
+NONEXISTING_TABLE_GRANT = 1147
+NOT_ALLOWED_COMMAND = 1148
+SYNTAX_ERROR = 1149
+DELAYED_CANT_CHANGE_LOCK = 1150
+TOO_MANY_DELAYED_THREADS = 1151
+ABORTING_CONNECTION = 1152
+NET_PACKET_TOO_LARGE = 1153
+NET_READ_ERROR_FROM_PIPE = 1154
+NET_FCNTL_ERROR = 1155
+NET_PACKETS_OUT_OF_ORDER = 1156
+NET_UNCOMPRESS_ERROR = 1157
+NET_READ_ERROR = 1158
+NET_READ_INTERRUPTED = 1159
+NET_ERROR_ON_WRITE = 1160
+NET_WRITE_INTERRUPTED = 1161
+TOO_LONG_STRING = 1162
+TABLE_CANT_HANDLE_BLOB = 1163
+TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
+DELAYED_INSERT_TABLE_LOCKED = 1165
+WRONG_COLUMN_NAME = 1166
+WRONG_KEY_COLUMN = 1167
+WRONG_MRG_TABLE = 1168
+DUP_UNIQUE = 1169
+BLOB_KEY_WITHOUT_LENGTH = 1170
+PRIMARY_CANT_HAVE_NULL = 1171
+TOO_MANY_ROWS = 1172
+REQUIRES_PRIMARY_KEY = 1173
+NO_RAID_COMPILED = 1174
+UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
+KEY_DOES_NOT_EXITS = 1176
+CHECK_NO_SUCH_TABLE = 1177
+CHECK_NOT_IMPLEMENTED = 1178
+CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
+ERROR_DURING_COMMIT = 1180
+ERROR_DURING_ROLLBACK = 1181
+ERROR_DURING_FLUSH_LOGS = 1182
+ERROR_DURING_CHECKPOINT = 1183
+NEW_ABORTING_CONNECTION = 1184
+DUMP_NOT_IMPLEMENTED = 1185
+FLUSH_MASTER_BINLOG_CLOSED = 1186
+INDEX_REBUILD = 1187
+MASTER = 1188
+MASTER_NET_READ = 1189
+MASTER_NET_WRITE = 1190
+FT_MATCHING_KEY_NOT_FOUND = 1191
+LOCK_OR_ACTIVE_TRANSACTION = 1192
+UNKNOWN_SYSTEM_VARIABLE = 1193
+CRASHED_ON_USAGE = 1194
+CRASHED_ON_REPAIR = 1195
+WARNING_NOT_COMPLETE_ROLLBACK = 1196
+TRANS_CACHE_FULL = 1197
+SLAVE_MUST_STOP = 1198
+SLAVE_NOT_RUNNING = 1199
+BAD_SLAVE = 1200
+MASTER_INFO = 1201
+SLAVE_THREAD = 1202
+TOO_MANY_USER_CONNECTIONS = 1203
+SET_CONSTANTS_ONLY = 1204
+LOCK_WAIT_TIMEOUT = 1205
+LOCK_TABLE_FULL = 1206
+READ_ONLY_TRANSACTION = 1207
+DROP_DB_WITH_READ_LOCK = 1208
+CREATE_DB_WITH_READ_LOCK = 1209
+WRONG_ARGUMENTS = 1210
+NO_PERMISSION_TO_CREATE_USER = 1211
+UNION_TABLES_IN_DIFFERENT_DIR = 1212
+LOCK_DEADLOCK = 1213
+TABLE_CANT_HANDLE_FT = 1214
+CANNOT_ADD_FOREIGN = 1215
+NO_REFERENCED_ROW = 1216
+ROW_IS_REFERENCED = 1217
+CONNECT_TO_MASTER = 1218
+QUERY_ON_MASTER = 1219
+ERROR_WHEN_EXECUTING_COMMAND = 1220
+WRONG_USAGE = 1221
+WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
+CANT_UPDATE_WITH_READLOCK = 1223
+MIXING_NOT_ALLOWED = 1224
+DUP_ARGUMENT = 1225
+USER_LIMIT_REACHED = 1226
+SPECIFIC_ACCESS_DENIED_ERROR = 1227
+LOCAL_VARIABLE = 1228
+GLOBAL_VARIABLE = 1229
+NO_DEFAULT = 1230
+WRONG_VALUE_FOR_VAR = 1231
+WRONG_TYPE_FOR_VAR = 1232
+VAR_CANT_BE_READ = 1233
+CANT_USE_OPTION_HERE = 1234
+NOT_SUPPORTED_YET = 1235
+MASTER_FATAL_ERROR_READING_BINLOG = 1236
+SLAVE_IGNORED_TABLE = 1237
+INCORRECT_GLOBAL_LOCAL_VAR = 1238
+WRONG_FK_DEF = 1239
+KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
+OPERAND_COLUMNS = 1241
+SUBQUERY_NO_1_ROW = 1242
+UNKNOWN_STMT_HANDLER = 1243
+CORRUPT_HELP_DB = 1244
+CYCLIC_REFERENCE = 1245
+AUTO_CONVERT = 1246
+ILLEGAL_REFERENCE = 1247
+DERIVED_MUST_HAVE_ALIAS = 1248
+SELECT_REDUCED = 1249
+TABLENAME_NOT_ALLOWED_HERE = 1250
+NOT_SUPPORTED_AUTH_MODE = 1251
+SPATIAL_CANT_HAVE_NULL = 1252
+COLLATION_CHARSET_MISMATCH = 1253
+SLAVE_WAS_RUNNING = 1254
+SLAVE_WAS_NOT_RUNNING = 1255
+TOO_BIG_FOR_UNCOMPRESS = 1256
+ZLIB_Z_MEM_ERROR = 1257
+ZLIB_Z_BUF_ERROR = 1258
+ZLIB_Z_DATA_ERROR = 1259
+CUT_VALUE_GROUP_CONCAT = 1260
+WARN_TOO_FEW_RECORDS = 1261
+WARN_TOO_MANY_RECORDS = 1262
+WARN_NULL_TO_NOTNULL = 1263
+WARN_DATA_OUT_OF_RANGE = 1264
+WARN_DATA_TRUNCATED = 1265
+WARN_USING_OTHER_HANDLER = 1266
+CANT_AGGREGATE_2COLLATIONS = 1267
+DROP_USER = 1268
+REVOKE_GRANTS = 1269
+CANT_AGGREGATE_3COLLATIONS = 1270
+CANT_AGGREGATE_NCOLLATIONS = 1271
+VARIABLE_IS_NOT_STRUCT = 1272
+UNKNOWN_COLLATION = 1273
+SLAVE_IGNORED_SSL_PARAMS = 1274
+SERVER_IS_IN_SECURE_AUTH_MODE = 1275
+WARN_FIELD_RESOLVED = 1276
+BAD_SLAVE_UNTIL_COND = 1277
+MISSING_SKIP_SLAVE = 1278
+UNTIL_COND_IGNORED = 1279
+WRONG_NAME_FOR_INDEX = 1280
+WRONG_NAME_FOR_CATALOG = 1281
+WARN_QC_RESIZE = 1282
+BAD_FT_COLUMN = 1283
+UNKNOWN_KEY_CACHE = 1284
+WARN_HOSTNAME_WONT_WORK = 1285
+UNKNOWN_STORAGE_ENGINE = 1286
+WARN_DEPRECATED_SYNTAX = 1287
+NON_UPDATABLE_TABLE = 1288
+FEATURE_DISABLED = 1289
+OPTION_PREVENTS_STATEMENT = 1290
+DUPLICATED_VALUE_IN_TYPE = 1291
+TRUNCATED_WRONG_VALUE = 1292
+TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
+INVALID_ON_UPDATE = 1294
+UNSUPPORTED_PS = 1295
+GET_ERRMSG = 1296
+GET_TEMPORARY_ERRMSG = 1297
+UNKNOWN_TIME_ZONE = 1298
+WARN_INVALID_TIMESTAMP = 1299
+INVALID_CHARACTER_STRING = 1300
+WARN_ALLOWED_PACKET_OVERFLOWED = 1301
+CONFLICTING_DECLARATIONS = 1302
+SP_NO_RECURSIVE_CREATE = 1303
+SP_ALREADY_EXISTS = 1304
+SP_DOES_NOT_EXIST = 1305
+SP_DROP_FAILED = 1306
+SP_STORE_FAILED = 1307
+SP_LILABEL_MISMATCH = 1308
+SP_LABEL_REDEFINE = 1309
+SP_LABEL_MISMATCH = 1310
+SP_UNINIT_VAR = 1311
+SP_BADSELECT = 1312
+SP_BADRETURN = 1313
+SP_BADSTATEMENT = 1314
+UPDATE_LOG_DEPRECATED_IGNORED = 1315
+UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
+QUERY_INTERRUPTED = 1317
+SP_WRONG_NO_OF_ARGS = 1318
+SP_COND_MISMATCH = 1319
+SP_NORETURN = 1320
+SP_NORETURNEND = 1321
+SP_BAD_CURSOR_QUERY = 1322
+SP_BAD_CURSOR_SELECT = 1323
+SP_CURSOR_MISMATCH = 1324
+SP_CURSOR_ALREADY_OPEN = 1325
+SP_CURSOR_NOT_OPEN = 1326
+SP_UNDECLARED_VAR = 1327
+SP_WRONG_NO_OF_FETCH_ARGS = 1328
+SP_FETCH_NO_DATA = 1329
+SP_DUP_PARAM = 1330
+SP_DUP_VAR = 1331
+SP_DUP_COND = 1332
+SP_DUP_CURS = 1333
+SP_CANT_ALTER = 1334
+SP_SUBSELECT_NYI = 1335
+STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
+SP_VARCOND_AFTER_CURSHNDLR = 1337
+SP_CURSOR_AFTER_HANDLER = 1338
+SP_CASE_NOT_FOUND = 1339
+FPARSER_TOO_BIG_FILE = 1340
+FPARSER_BAD_HEADER = 1341
+FPARSER_EOF_IN_COMMENT = 1342
+FPARSER_ERROR_IN_PARAMETER = 1343
+FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
+VIEW_NO_EXPLAIN = 1345
+FRM_UNKNOWN_TYPE = 1346
+WRONG_OBJECT = 1347
+NONUPDATEABLE_COLUMN = 1348
+VIEW_SELECT_DERIVED = 1349
+VIEW_SELECT_CLAUSE = 1350
+VIEW_SELECT_VARIABLE = 1351
+VIEW_SELECT_TMPTABLE = 1352
+VIEW_WRONG_LIST = 1353
+WARN_VIEW_MERGE = 1354
+WARN_VIEW_WITHOUT_KEY = 1355
+VIEW_INVALID = 1356
+SP_NO_DROP_SP = 1357
+SP_GOTO_IN_HNDLR = 1358
+TRG_ALREADY_EXISTS = 1359
+TRG_DOES_NOT_EXIST = 1360
+TRG_ON_VIEW_OR_TEMP_TABLE = 1361
+TRG_CANT_CHANGE_ROW = 1362
+TRG_NO_SUCH_ROW_IN_TRG = 1363
+NO_DEFAULT_FOR_FIELD = 1364
+DIVISION_BY_ZERO = 1365
+TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
+ILLEGAL_VALUE_FOR_TYPE = 1367
+VIEW_NONUPD_CHECK = 1368
+VIEW_CHECK_FAILED = 1369
+PROCACCESS_DENIED_ERROR = 1370
+RELAY_LOG_FAIL = 1371
+PASSWD_LENGTH = 1372
+UNKNOWN_TARGET_BINLOG = 1373
+IO_ERR_LOG_INDEX_READ = 1374
+BINLOG_PURGE_PROHIBITED = 1375
+FSEEK_FAIL = 1376
+BINLOG_PURGE_FATAL_ERR = 1377
+LOG_IN_USE = 1378
+LOG_PURGE_UNKNOWN_ERR = 1379
+RELAY_LOG_INIT = 1380
+NO_BINARY_LOGGING = 1381
+RESERVED_SYNTAX = 1382
+WSAS_FAILED = 1383
+DIFF_GROUPS_PROC = 1384
+NO_GROUP_FOR_PROC = 1385
+ORDER_WITH_PROC = 1386
+LOGGING_PROHIBIT_CHANGING_OF = 1387
+NO_FILE_MAPPING = 1388
+WRONG_MAGIC = 1389
+PS_MANY_PARAM = 1390
+KEY_PART_0 = 1391
+VIEW_CHECKSUM = 1392
+VIEW_MULTIUPDATE = 1393
+VIEW_NO_INSERT_FIELD_LIST = 1394
+VIEW_DELETE_MERGE_VIEW = 1395
+CANNOT_USER = 1396
+XAER_NOTA = 1397
+XAER_INVAL = 1398
+XAER_RMFAIL = 1399
+XAER_OUTSIDE = 1400
+XAER_RMERR = 1401
+XA_RBROLLBACK = 1402
+NONEXISTING_PROC_GRANT = 1403
+PROC_AUTO_GRANT_FAIL = 1404
+PROC_AUTO_REVOKE_FAIL = 1405
+DATA_TOO_LONG = 1406
+SP_BAD_SQLSTATE = 1407
+STARTUP = 1408
+LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
+CANT_CREATE_USER_WITH_GRANT = 1410
+WRONG_VALUE_FOR_TYPE = 1411
+TABLE_DEF_CHANGED = 1412
+SP_DUP_HANDLER = 1413
+SP_NOT_VAR_ARG = 1414
+SP_NO_RETSET = 1415
+CANT_CREATE_GEOMETRY_OBJECT = 1416
+FAILED_ROUTINE_BREAK_BINLOG = 1417
+BINLOG_UNSAFE_ROUTINE = 1418
+BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
+EXEC_STMT_WITH_OPEN_CURSOR = 1420
+STMT_HAS_NO_OPEN_CURSOR = 1421
+COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
+NO_DEFAULT_FOR_VIEW_FIELD = 1423
+SP_NO_RECURSION = 1424
+TOO_BIG_SCALE = 1425
+TOO_BIG_PRECISION = 1426
+M_BIGGER_THAN_D = 1427
+WRONG_LOCK_OF_SYSTEM_TABLE = 1428
+CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
+QUERY_ON_FOREIGN_DATA_SOURCE = 1430
+FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
+FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
+FOREIGN_DATA_STRING_INVALID = 1433
+CANT_CREATE_FEDERATED_TABLE = 1434
+TRG_IN_WRONG_SCHEMA = 1435
+STACK_OVERRUN_NEED_MORE = 1436
+TOO_LONG_BODY = 1437
+WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
+TOO_BIG_DISPLAYWIDTH = 1439
+XAER_DUPID = 1440
+DATETIME_FUNCTION_OVERFLOW = 1441
+CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
+VIEW_PREVENT_UPDATE = 1443
+PS_NO_RECURSION = 1444
+SP_CANT_SET_AUTOCOMMIT = 1445
+MALFORMED_DEFINER = 1446
+VIEW_FRM_NO_USER = 1447
+VIEW_OTHER_USER = 1448
+NO_SUCH_USER = 1449
+FORBID_SCHEMA_CHANGE = 1450
+ROW_IS_REFERENCED_2 = 1451
+NO_REFERENCED_ROW_2 = 1452
+SP_BAD_VAR_SHADOW = 1453
+TRG_NO_DEFINER = 1454
+OLD_FILE_FORMAT = 1455
+SP_RECURSION_LIMIT = 1456
+SP_PROC_TABLE_CORRUPT = 1457
+SP_WRONG_NAME = 1458
+TABLE_NEEDS_UPGRADE = 1459
+SP_NO_AGGREGATE = 1460
+MAX_PREPARED_STMT_COUNT_REACHED = 1461
+VIEW_RECURSIVE = 1462
+NON_GROUPING_FIELD_USED = 1463
+TABLE_CANT_HANDLE_SPKEYS = 1464
+NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
+USERNAME = 1466
+HOSTNAME = 1467
+WRONG_STRING_LENGTH = 1468
+ERROR_LAST = 1468
+
+# https://github.com/PyMySQL/PyMySQL/issues/607
+CONSTRAINT_FAILED = 4025

+ 31 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/FIELD_TYPE.py

@@ -0,0 +1,31 @@
+DECIMAL = 0
+TINY = 1
+SHORT = 2
+LONG = 3
+FLOAT = 4
+DOUBLE = 5
+NULL = 6
+TIMESTAMP = 7
+LONGLONG = 8
+INT24 = 9
+DATE = 10
+TIME = 11
+DATETIME = 12
+YEAR = 13
+NEWDATE = 14
+VARCHAR = 15
+BIT = 16
+JSON = 245
+NEWDECIMAL = 246
+ENUM = 247
+SET = 248
+TINY_BLOB = 249
+MEDIUM_BLOB = 250
+LONG_BLOB = 251
+BLOB = 252
+VAR_STRING = 253
+STRING = 254
+GEOMETRY = 255
+
+CHAR = TINY
+INTERVAL = ENUM

+ 15 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/FLAG.py

@@ -0,0 +1,15 @@
+NOT_NULL = 1
+PRI_KEY = 2
+UNIQUE_KEY = 4
+MULTIPLE_KEY = 8
+BLOB = 16
+UNSIGNED = 32
+ZEROFILL = 64
+BINARY = 128
+ENUM = 256
+AUTO_INCREMENT = 512
+TIMESTAMP = 1024
+SET = 2048
+PART_KEY = 16384
+GROUP = 32767
+UNIQUE = 65536
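
These FLAG values are bit flags that get OR-ed into a column's `flags` bitmask (see FieldDescriptorPacket further below); a minimal sketch of testing them, with a hand-built mask used only for illustration:

    from pymysql.constants import FLAG

    # Column flags arrive as a single bitmask; check individual flags with bitwise AND.
    col_flags = FLAG.NOT_NULL | FLAG.PRI_KEY | FLAG.AUTO_INCREMENT
    if col_flags & FLAG.PRI_KEY:
        print("primary key column")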

+ 10 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/SERVER_STATUS.py

@@ -0,0 +1,10 @@
+SERVER_STATUS_IN_TRANS = 1
+SERVER_STATUS_AUTOCOMMIT = 2
+SERVER_MORE_RESULTS_EXISTS = 8
+SERVER_QUERY_NO_GOOD_INDEX_USED = 16
+SERVER_QUERY_NO_INDEX_USED = 32
+SERVER_STATUS_CURSOR_EXISTS = 64
+SERVER_STATUS_LAST_ROW_SENT = 128
+SERVER_STATUS_DB_DROPPED = 256
+SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512
+SERVER_STATUS_METADATA_CHANGED = 1024

+ 0 - 0
ambt-preden-dailymigration-dev/package/pymysql/constants/__init__.py


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/CLIENT.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/COMMAND.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/CR.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/ER.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/FLAG.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc


BIN
ambt-preden-dailymigration-dev/package/pymysql/constants/__pycache__/__init__.cpython-38.pyc


+ 364 - 0
ambt-preden-dailymigration-dev/package/pymysql/converters.py

@@ -0,0 +1,364 @@
+import datetime
+from decimal import Decimal
+import re
+import time
+
+from .err import ProgrammingError
+from .constants import FIELD_TYPE
+
+
+def escape_item(val, charset, mapping=None):
+    if mapping is None:
+        mapping = encoders
+    encoder = mapping.get(type(val))
+
+    # Fallback to default when no encoder found
+    if not encoder:
+        try:
+            encoder = mapping[str]
+        except KeyError:
+            raise TypeError("no default type converter defined")
+
+    if encoder in (escape_dict, escape_sequence):
+        val = encoder(val, charset, mapping)
+    else:
+        val = encoder(val, mapping)
+    return val
+
+
+def escape_dict(val, charset, mapping=None):
+    n = {}
+    for k, v in val.items():
+        quoted = escape_item(v, charset, mapping)
+        n[k] = quoted
+    return n
+
+
+def escape_sequence(val, charset, mapping=None):
+    n = []
+    for item in val:
+        quoted = escape_item(item, charset, mapping)
+        n.append(quoted)
+    return "(" + ",".join(n) + ")"
+
+
+def escape_set(val, charset, mapping=None):
+    return ",".join([escape_item(x, charset, mapping) for x in val])
+
+
+def escape_bool(value, mapping=None):
+    return str(int(value))
+
+
+def escape_int(value, mapping=None):
+    return str(value)
+
+
+def escape_float(value, mapping=None):
+    s = repr(value)
+    if s in ("inf", "nan"):
+        raise ProgrammingError("%s can not be used with MySQL" % s)
+    if "e" not in s:
+        s += "e0"
+    return s
+
+
+_escape_table = [chr(x) for x in range(128)]
+_escape_table[0] = "\\0"
+_escape_table[ord("\\")] = "\\\\"
+_escape_table[ord("\n")] = "\\n"
+_escape_table[ord("\r")] = "\\r"
+_escape_table[ord("\032")] = "\\Z"
+_escape_table[ord('"')] = '\\"'
+_escape_table[ord("'")] = "\\'"
+
+
+def escape_string(value, mapping=None):
+    """escapes *value* without adding quote.
+
+    Value should be unicode
+    """
+    return value.translate(_escape_table)
+
+
+def escape_bytes_prefixed(value, mapping=None):
+    return "_binary'%s'" % value.decode("ascii", "surrogateescape").translate(
+        _escape_table
+    )
+
+
+def escape_bytes(value, mapping=None):
+    return "'%s'" % value.decode("ascii", "surrogateescape").translate(_escape_table)
+
+
+def escape_str(value, mapping=None):
+    return "'%s'" % escape_string(str(value), mapping)
+
+
+def escape_None(value, mapping=None):
+    return "NULL"
+
+
+def escape_timedelta(obj, mapping=None):
+    seconds = int(obj.seconds) % 60
+    minutes = int(obj.seconds // 60) % 60
+    hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
+    if obj.microseconds:
+        fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
+    else:
+        fmt = "'{0:02d}:{1:02d}:{2:02d}'"
+    return fmt.format(hours, minutes, seconds, obj.microseconds)
+
+
+def escape_time(obj, mapping=None):
+    if obj.microsecond:
+        fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+    else:
+        fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
+    return fmt.format(obj)
+
+
+def escape_datetime(obj, mapping=None):
+    if obj.microsecond:
+        fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+    else:
+        fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
+    return fmt.format(obj)
+
+
+def escape_date(obj, mapping=None):
+    fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
+    return fmt.format(obj)
+
+
+def escape_struct_time(obj, mapping=None):
+    return escape_datetime(datetime.datetime(*obj[:6]))
+
+
+def Decimal2Literal(o, d):
+    return format(o, "f")
+
+
+def _convert_second_fraction(s):
+    if not s:
+        return 0
+    # Pad zeros to ensure the fraction length in microseconds
+    s = s.ljust(6, "0")
+    return int(s[:6])
+
+
+DATETIME_RE = re.compile(
+    r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?"
+)
+
+
+def convert_datetime(obj):
+    """Returns a DATETIME or TIMESTAMP column value as a datetime object:
+
+      >>> datetime_or_None('2007-02-25 23:06:20')
+      datetime.datetime(2007, 2, 25, 23, 6, 20)
+      >>> datetime_or_None('2007-02-25T23:06:20')
+      datetime.datetime(2007, 2, 25, 23, 6, 20)
+
+    Illegal values are returned as None:
+
+      >>> datetime_or_None('2007-02-31T23:06:20') is None
+      True
+      >>> datetime_or_None('0000-00-00 00:00:00') is None
+      True
+
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = DATETIME_RE.match(obj)
+    if not m:
+        return convert_date(obj)
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        return datetime.datetime(*[int(x) for x in groups])
+    except ValueError:
+        return convert_date(obj)
+
+
+TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_timedelta(obj):
+    """Returns a TIME column as a timedelta object:
+
+      >>> timedelta_or_None('25:06:17')
+      datetime.timedelta(1, 3977)
+      >>> timedelta_or_None('-25:06:17')
+      datetime.timedelta(-2, 83177)
+
+    Illegal values are returned as None:
+
+      >>> timedelta_or_None('random crap') is None
+      True
+
+    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+    can accept values as (+|-)DD HH:MM:SS. The latter format will not
+    be parsed correctly by this function.
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = TIMEDELTA_RE.match(obj)
+    if not m:
+        return obj
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        negate = -1 if groups[0] else 1
+        hours, minutes, seconds, microseconds = groups[1:]
+
+        tdelta = (
+            datetime.timedelta(
+                hours=int(hours),
+                minutes=int(minutes),
+                seconds=int(seconds),
+                microseconds=int(microseconds),
+            )
+            * negate
+        )
+        return tdelta
+    except ValueError:
+        return obj
+
+
+TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_time(obj):
+    """Returns a TIME column as a time object:
+
+      >>> time_or_None('15:06:17')
+      datetime.time(15, 6, 17)
+
+    Illegal values are returned as None:
+
+      >>> time_or_None('-25:06:17') is None
+      True
+      >>> time_or_None('random crap') is None
+      True
+
+    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+    can accept values as (+|-)DD HH:MM:SS. The latter format will not
+    be parsed correctly by this function.
+
+    Also note that MySQL's TIME column corresponds more closely to
+    Python's timedelta and not time. However if you want TIME columns
+    to be treated as time-of-day and not a time offset, then you can
+    use set this function as the converter for FIELD_TYPE.TIME.
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = TIME_RE.match(obj)
+    if not m:
+        return obj
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        hours, minutes, seconds, microseconds = groups
+        return datetime.time(
+            hour=int(hours),
+            minute=int(minutes),
+            second=int(seconds),
+            microsecond=int(microseconds),
+        )
+    except ValueError:
+        return obj
+
+
+def convert_date(obj):
+    """Returns a DATE column as a date object:
+
+      >>> date_or_None('2007-02-26')
+      datetime.date(2007, 2, 26)
+
+    Illegal values are returned as None:
+
+      >>> date_or_None('2007-02-31') is None
+      True
+      >>> date_or_None('0000-00-00') is None
+      True
+
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+    try:
+        return datetime.date(*[int(x) for x in obj.split("-", 2)])
+    except ValueError:
+        return obj
+
+
+def through(x):
+    return x
+
+
+# def convert_bit(b):
+#    b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
+#    return struct.unpack(">Q", b)[0]
+#
+#     the snippet above is right, but MySQLdb doesn't process bits,
+#     so we shouldn't either
+convert_bit = through
+
+
+encoders = {
+    bool: escape_bool,
+    int: escape_int,
+    float: escape_float,
+    str: escape_str,
+    bytes: escape_bytes,
+    tuple: escape_sequence,
+    list: escape_sequence,
+    set: escape_sequence,
+    frozenset: escape_sequence,
+    dict: escape_dict,
+    type(None): escape_None,
+    datetime.date: escape_date,
+    datetime.datetime: escape_datetime,
+    datetime.timedelta: escape_timedelta,
+    datetime.time: escape_time,
+    time.struct_time: escape_struct_time,
+    Decimal: Decimal2Literal,
+}
+
+
+decoders = {
+    FIELD_TYPE.BIT: convert_bit,
+    FIELD_TYPE.TINY: int,
+    FIELD_TYPE.SHORT: int,
+    FIELD_TYPE.LONG: int,
+    FIELD_TYPE.FLOAT: float,
+    FIELD_TYPE.DOUBLE: float,
+    FIELD_TYPE.LONGLONG: int,
+    FIELD_TYPE.INT24: int,
+    FIELD_TYPE.YEAR: int,
+    FIELD_TYPE.TIMESTAMP: convert_datetime,
+    FIELD_TYPE.DATETIME: convert_datetime,
+    FIELD_TYPE.TIME: convert_timedelta,
+    FIELD_TYPE.DATE: convert_date,
+    FIELD_TYPE.BLOB: through,
+    FIELD_TYPE.TINY_BLOB: through,
+    FIELD_TYPE.MEDIUM_BLOB: through,
+    FIELD_TYPE.LONG_BLOB: through,
+    FIELD_TYPE.STRING: through,
+    FIELD_TYPE.VAR_STRING: through,
+    FIELD_TYPE.VARCHAR: through,
+    FIELD_TYPE.DECIMAL: Decimal,
+    FIELD_TYPE.NEWDECIMAL: Decimal,
+}
+
+
+# for MySQLdb compatibility
+conversions = encoders.copy()
+conversions.update(decoders)
+Thing2Literal = escape_str
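
The `encoders`/`decoders` tables above drive parameter escaping and result conversion; a minimal sketch of how they behave (the values are chosen only for illustration):

    from pymysql.converters import escape_item, conversions
    from pymysql.constants import FIELD_TYPE

    # escape_item picks an encoder by Python type: strings are quoted and escaped,
    # None becomes NULL, and sequences become a parenthesized value list.
    print(escape_item("it's", "utf8"))     # 'it\'s'
    print(escape_item(None, "utf8"))       # NULL
    print(escape_item([1, 2, 3], "utf8"))  # (1,2,3)

    # decoders (merged into `conversions`) map MySQL field type codes to Python converters.
    to_python = conversions[FIELD_TYPE.DATETIME]
    print(to_python("2007-02-25 23:06:20"))  # datetime.datetime(2007, 2, 25, 23, 6, 20)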

+ 496 - 0
ambt-preden-dailymigration-dev/package/pymysql/cursors.py

@@ -0,0 +1,496 @@
+import re
+from . import err
+
+
+#: Regular expression for :meth:`Cursor.executemany`.
+#: executemany only supports simple bulk insert.
+#: You can use it to load large dataset.
+RE_INSERT_VALUES = re.compile(
+    r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)"
+    + r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))"
+    + r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
+    re.IGNORECASE | re.DOTALL,
+)
+
+
+class Cursor:
+    """
+    This is the object you use to interact with the database.
+
+    Do not create an instance of a Cursor yourself. Call
+    connections.Connection.cursor().
+
+    See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
+    the specification.
+    """
+
+    #: Max statement size which :meth:`executemany` generates.
+    #:
+    #: Max size of allowed statement is max_allowed_packet - packet_header_size.
+    #: Default value of max_allowed_packet is 1048576.
+    max_stmt_length = 1024000
+
+    def __init__(self, connection):
+        self.connection = connection
+        self.description = None
+        self.rownumber = 0
+        self.rowcount = -1
+        self.arraysize = 1
+        self._executed = None
+        self._result = None
+        self._rows = None
+
+    def close(self):
+        """
+        Closing a cursor just exhausts all remaining data.
+        """
+        conn = self.connection
+        if conn is None:
+            return
+        try:
+            while self.nextset():
+                pass
+        finally:
+            self.connection = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        del exc_info
+        self.close()
+
+    def _get_db(self):
+        if not self.connection:
+            raise err.ProgrammingError("Cursor closed")
+        return self.connection
+
+    def _check_executed(self):
+        if not self._executed:
+            raise err.ProgrammingError("execute() first")
+
+    def _conv_row(self, row):
+        return row
+
+    def setinputsizes(self, *args):
+        """Does nothing, required by DB API."""
+
+    def setoutputsizes(self, *args):
+        """Does nothing, required by DB API."""
+
+    def _nextset(self, unbuffered=False):
+        """Get the next query set"""
+        conn = self._get_db()
+        current_result = self._result
+        if current_result is None or current_result is not conn._result:
+            return None
+        if not current_result.has_next:
+            return None
+        self._result = None
+        self._clear_result()
+        conn.next_result(unbuffered=unbuffered)
+        self._do_get_result()
+        return True
+
+    def nextset(self):
+        return self._nextset(False)
+
+    def _ensure_bytes(self, x, encoding=None):
+        if isinstance(x, str):
+            x = x.encode(encoding)
+        elif isinstance(x, (tuple, list)):
+            x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x)
+        return x
+
+    def _escape_args(self, args, conn):
+        if isinstance(args, (tuple, list)):
+            return tuple(conn.literal(arg) for arg in args)
+        elif isinstance(args, dict):
+            return {key: conn.literal(val) for (key, val) in args.items()}
+        else:
+            # If it's not a dictionary let's try escaping it anyways.
+            # Worst case it will throw a Value error
+            return conn.escape(args)
+
+    def mogrify(self, query, args=None):
+        """
+        Returns the exact string that is sent to the database by calling the
+        execute() method.
+
+        This method follows the extension to the DB API 2.0 followed by Psycopg.
+        """
+        conn = self._get_db()
+
+        if args is not None:
+            query = query % self._escape_args(args, conn)
+
+        return query
+
+    def execute(self, query, args=None):
+        """Execute a query
+
+        :param str query: Query to execute.
+
+        :param args: parameters used with query. (optional)
+        :type args: tuple, list or dict
+
+        :return: Number of affected rows
+        :rtype: int
+
+        If args is a list or tuple, %s can be used as a placeholder in the query.
+        If args is a dict, %(name)s can be used as a placeholder in the query.
+        """
+        while self.nextset():
+            pass
+
+        query = self.mogrify(query, args)
+
+        result = self._query(query)
+        self._executed = query
+        return result
+
+    def executemany(self, query, args):
+        # type: (str, list) -> int
+        """Run several data against one query
+
+        :param query: query to execute on server
+        :param args:  Sequence of sequences or mappings.  It is used as parameter.
+        :return: Number of rows affected, if any.
+
+        This method improves performance on multiple-row INSERT and
+        REPLACE. Otherwise it is equivalent to looping over args with
+        execute().
+        """
+        if not args:
+            return
+
+        m = RE_INSERT_VALUES.match(query)
+        if m:
+            q_prefix = m.group(1) % ()
+            q_values = m.group(2).rstrip()
+            q_postfix = m.group(3) or ""
+            assert q_values[0] == "(" and q_values[-1] == ")"
+            return self._do_execute_many(
+                q_prefix,
+                q_values,
+                q_postfix,
+                args,
+                self.max_stmt_length,
+                self._get_db().encoding,
+            )
+
+        self.rowcount = sum(self.execute(query, arg) for arg in args)
+        return self.rowcount
+
+    def _do_execute_many(
+        self, prefix, values, postfix, args, max_stmt_length, encoding
+    ):
+        conn = self._get_db()
+        escape = self._escape_args
+        if isinstance(prefix, str):
+            prefix = prefix.encode(encoding)
+        if isinstance(postfix, str):
+            postfix = postfix.encode(encoding)
+        sql = bytearray(prefix)
+        args = iter(args)
+        v = values % escape(next(args), conn)
+        if isinstance(v, str):
+            v = v.encode(encoding, "surrogateescape")
+        sql += v
+        rows = 0
+        for arg in args:
+            v = values % escape(arg, conn)
+            if isinstance(v, str):
+                v = v.encode(encoding, "surrogateescape")
+            if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
+                rows += self.execute(sql + postfix)
+                sql = bytearray(prefix)
+            else:
+                sql += b","
+            sql += v
+        rows += self.execute(sql + postfix)
+        self.rowcount = rows
+        return rows
+
+    def callproc(self, procname, args=()):
+        """Execute stored procedure procname with args
+
+        procname -- string, name of procedure to execute on server
+
+        args -- Sequence of parameters to use with procedure
+
+        Returns the original args.
+
+        Compatibility warning: PEP-249 specifies that any modified
+        parameters must be returned. This is currently impossible
+        as they are only available by storing them in a server
+        variable and then retrieved by a query. Since stored
+        procedures return zero or more result sets, there is no
+        reliable way to get at OUT or INOUT parameters via callproc.
+        The server variables are named @_procname_n, where procname
+        is the parameter above and n is the position of the parameter
+        (from zero). Once all result sets generated by the procedure
+        have been fetched, you can issue a SELECT @_procname_0, ...
+        query using .execute() to get any OUT or INOUT values.
+
+        Compatibility warning: The act of calling a stored procedure
+        itself creates an empty result set. This appears after any
+        result sets generated by the procedure. This is non-standard
+        behavior with respect to the DB-API. Be sure to use nextset()
+        to advance through all result sets; otherwise you may get
+        disconnected.
+        """
+        conn = self._get_db()
+        if args:
+            fmt = f"@_{procname}_%d=%s"
+            self._query(
+                "SET %s"
+                % ",".join(
+                    fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
+                )
+            )
+            self.nextset()
+
+        q = "CALL %s(%s)" % (
+            procname,
+            ",".join(["@_%s_%d" % (procname, i) for i in range(len(args))]),
+        )
+        self._query(q)
+        self._executed = q
+        return args
+
+    def fetchone(self):
+        """Fetch the next row"""
+        self._check_executed()
+        if self._rows is None or self.rownumber >= len(self._rows):
+            return None
+        result = self._rows[self.rownumber]
+        self.rownumber += 1
+        return result
+
+    def fetchmany(self, size=None):
+        """Fetch several rows"""
+        self._check_executed()
+        if self._rows is None:
+            return ()
+        end = self.rownumber + (size or self.arraysize)
+        result = self._rows[self.rownumber : end]
+        self.rownumber = min(end, len(self._rows))
+        return result
+
+    def fetchall(self):
+        """Fetch all the rows"""
+        self._check_executed()
+        if self._rows is None:
+            return ()
+        if self.rownumber:
+            result = self._rows[self.rownumber :]
+        else:
+            result = self._rows
+        self.rownumber = len(self._rows)
+        return result
+
+    def scroll(self, value, mode="relative"):
+        self._check_executed()
+        if mode == "relative":
+            r = self.rownumber + value
+        elif mode == "absolute":
+            r = value
+        else:
+            raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+        if not (0 <= r < len(self._rows)):
+            raise IndexError("out of range")
+        self.rownumber = r
+
+    def _query(self, q):
+        conn = self._get_db()
+        self._last_executed = q
+        self._clear_result()
+        conn.query(q)
+        self._do_get_result()
+        return self.rowcount
+
+    def _clear_result(self):
+        self.rownumber = 0
+        self._result = None
+
+        self.rowcount = 0
+        self.description = None
+        self.lastrowid = None
+        self._rows = None
+
+    def _do_get_result(self):
+        conn = self._get_db()
+
+        self._result = result = conn._result
+
+        self.rowcount = result.affected_rows
+        self.description = result.description
+        self.lastrowid = result.insert_id
+        self._rows = result.rows
+
+    def __iter__(self):
+        return iter(self.fetchone, None)
+
+    Warning = err.Warning
+    Error = err.Error
+    InterfaceError = err.InterfaceError
+    DatabaseError = err.DatabaseError
+    DataError = err.DataError
+    OperationalError = err.OperationalError
+    IntegrityError = err.IntegrityError
+    InternalError = err.InternalError
+    ProgrammingError = err.ProgrammingError
+    NotSupportedError = err.NotSupportedError
+
+
+class DictCursorMixin:
+    # You can override this to use OrderedDict or other dict-like types.
+    dict_type = dict
+
+    def _do_get_result(self):
+        super(DictCursorMixin, self)._do_get_result()
+        fields = []
+        if self.description:
+            for f in self._result.fields:
+                name = f.name
+                if name in fields:
+                    name = f.table_name + "." + name
+                fields.append(name)
+            self._fields = fields
+
+        if fields and self._rows:
+            self._rows = [self._conv_row(r) for r in self._rows]
+
+    def _conv_row(self, row):
+        if row is None:
+            return None
+        return self.dict_type(zip(self._fields, row))
+
+
+class DictCursor(DictCursorMixin, Cursor):
+    """A cursor which returns results as a dictionary"""
+
+
+class SSCursor(Cursor):
+    """
+    Unbuffered Cursor, mainly useful for queries that return a lot of data,
+    or for connections to remote servers over a slow network.
+
+    Instead of copying every row of data into a buffer, this will fetch
+    rows as needed. The upside of this is the client uses much less memory,
+    and rows are returned much faster when traveling over a slow network
+    or if the result set is very big.
+
+    There are limitations, though. The MySQL protocol doesn't support
+    returning the total number of rows, so the only way to tell how many rows
+    there are is to iterate over every row returned. Also, it currently isn't
+    possible to scroll backwards, as only the current row is held in memory.
+    """
+
+    def _conv_row(self, row):
+        return row
+
+    def close(self):
+        conn = self.connection
+        if conn is None:
+            return
+
+        if self._result is not None and self._result is conn._result:
+            self._result._finish_unbuffered_query()
+
+        try:
+            while self.nextset():
+                pass
+        finally:
+            self.connection = None
+
+    __del__ = close
+
+    def _query(self, q):
+        conn = self._get_db()
+        self._last_executed = q
+        self._clear_result()
+        conn.query(q, unbuffered=True)
+        self._do_get_result()
+        return self.rowcount
+
+    def nextset(self):
+        return self._nextset(unbuffered=True)
+
+    def read_next(self):
+        """Read next row"""
+        return self._conv_row(self._result._read_rowdata_packet_unbuffered())
+
+    def fetchone(self):
+        """Fetch next row"""
+        self._check_executed()
+        row = self.read_next()
+        if row is None:
+            return None
+        self.rownumber += 1
+        return row
+
+    def fetchall(self):
+        """
+        Fetch all, as per MySQLdb. Pretty useless for large queries, as
+        it is buffered. See fetchall_unbuffered(), if you want an unbuffered
+        generator version of this method.
+        """
+        return list(self.fetchall_unbuffered())
+
+    def fetchall_unbuffered(self):
+        """
+        Fetch all, implemented as a generator, which isn't to standard,
+        however, it doesn't make sense to return everything in a list, as that
+        would use ridiculous memory for large result sets.
+        """
+        return iter(self.fetchone, None)
+
+    def __iter__(self):
+        return self.fetchall_unbuffered()
+
+    def fetchmany(self, size=None):
+        """Fetch many"""
+        self._check_executed()
+        if size is None:
+            size = self.arraysize
+
+        rows = []
+        for i in range(size):
+            row = self.read_next()
+            if row is None:
+                break
+            rows.append(row)
+            self.rownumber += 1
+        return rows
+
+    def scroll(self, value, mode="relative"):
+        self._check_executed()
+
+        if mode == "relative":
+            if value < 0:
+                raise err.NotSupportedError(
+                    "Backwards scrolling not supported by this cursor"
+                )
+
+            for _ in range(value):
+                self.read_next()
+            self.rownumber += value
+        elif mode == "absolute":
+            if value < self.rownumber:
+                raise err.NotSupportedError(
+                    "Backwards scrolling not supported by this cursor"
+                )
+
+            end = value - self.rownumber
+            for _ in range(end):
+                self.read_next()
+            self.rownumber = value
+        else:
+            raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+
+class SSDictCursor(DictCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as a dictionary"""

+ 143 - 0
ambt-preden-dailymigration-dev/package/pymysql/err.py

@@ -0,0 +1,143 @@
+import struct
+
+from .constants import ER
+
+
+class MySQLError(Exception):
+    """Exception related to operation with MySQL."""
+
+
+class Warning(Warning, MySQLError):
+    """Exception raised for important warnings like data truncations
+    while inserting, etc."""
+
+
+class Error(MySQLError):
+    """Exception that is the base class of all other error exceptions
+    (not Warning)."""
+
+
+class InterfaceError(Error):
+    """Exception raised for errors that are related to the database
+    interface rather than the database itself."""
+
+
+class DatabaseError(Error):
+    """Exception raised for errors that are related to the
+    database."""
+
+
+class DataError(DatabaseError):
+    """Exception raised for errors that are due to problems with the
+    processed data like division by zero, numeric value out of range,
+    etc."""
+
+
+class OperationalError(DatabaseError):
+    """Exception raised for errors that are related to the database's
+    operation and not necessarily under the control of the programmer,
+    e.g. an unexpected disconnect occurs, the data source name is not
+    found, a transaction could not be processed, a memory allocation
+    error occurred during processing, etc."""
+
+
+class IntegrityError(DatabaseError):
+    """Exception raised when the relational integrity of the database
+    is affected, e.g. a foreign key check fails, duplicate key,
+    etc."""
+
+
+class InternalError(DatabaseError):
+    """Exception raised when the database encounters an internal
+    error, e.g. the cursor is not valid anymore, the transaction is
+    out of sync, etc."""
+
+
+class ProgrammingError(DatabaseError):
+    """Exception raised for programming errors, e.g. table not found
+    or already exists, syntax error in the SQL statement, wrong number
+    of parameters specified, etc."""
+
+
+class NotSupportedError(DatabaseError):
+    """Exception raised in case a method or database API was used
+    which is not supported by the database, e.g. requesting a
+    .rollback() on a connection that does not support transaction or
+    has transactions turned off."""
+
+
+error_map = {}
+
+
+def _map_error(exc, *errors):
+    for error in errors:
+        error_map[error] = exc
+
+
+_map_error(
+    ProgrammingError,
+    ER.DB_CREATE_EXISTS,
+    ER.SYNTAX_ERROR,
+    ER.PARSE_ERROR,
+    ER.NO_SUCH_TABLE,
+    ER.WRONG_DB_NAME,
+    ER.WRONG_TABLE_NAME,
+    ER.FIELD_SPECIFIED_TWICE,
+    ER.INVALID_GROUP_FUNC_USE,
+    ER.UNSUPPORTED_EXTENSION,
+    ER.TABLE_MUST_HAVE_COLUMNS,
+    ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
+    ER.WRONG_DB_NAME,
+    ER.WRONG_COLUMN_NAME,
+)
+_map_error(
+    DataError,
+    ER.WARN_DATA_TRUNCATED,
+    ER.WARN_NULL_TO_NOTNULL,
+    ER.WARN_DATA_OUT_OF_RANGE,
+    ER.NO_DEFAULT,
+    ER.PRIMARY_CANT_HAVE_NULL,
+    ER.DATA_TOO_LONG,
+    ER.DATETIME_FUNCTION_OVERFLOW,
+    ER.TRUNCATED_WRONG_VALUE_FOR_FIELD,
+    ER.ILLEGAL_VALUE_FOR_TYPE,
+)
+_map_error(
+    IntegrityError,
+    ER.DUP_ENTRY,
+    ER.NO_REFERENCED_ROW,
+    ER.NO_REFERENCED_ROW_2,
+    ER.ROW_IS_REFERENCED,
+    ER.ROW_IS_REFERENCED_2,
+    ER.CANNOT_ADD_FOREIGN,
+    ER.BAD_NULL_ERROR,
+)
+_map_error(
+    NotSupportedError,
+    ER.WARNING_NOT_COMPLETE_ROLLBACK,
+    ER.NOT_SUPPORTED_YET,
+    ER.FEATURE_DISABLED,
+    ER.UNKNOWN_STORAGE_ENGINE,
+)
+_map_error(
+    OperationalError,
+    ER.DBACCESS_DENIED_ERROR,
+    ER.ACCESS_DENIED_ERROR,
+    ER.CON_COUNT_ERROR,
+    ER.TABLEACCESS_DENIED_ERROR,
+    ER.COLUMNACCESS_DENIED_ERROR,
+    ER.CONSTRAINT_FAILED,
+    ER.LOCK_DEADLOCK,
+)
+
+
+del _map_error, ER
+
+
+def raise_mysql_exception(data):
+    errno = struct.unpack("<h", data[1:3])[0]
+    errval = data[9:].decode("utf-8", "replace")
+    errorclass = error_map.get(errno)
+    if errorclass is None:
+        errorclass = InternalError if errno < 1000 else OperationalError
+    raise errorclass(errno, errval)
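
A minimal sketch of how the error-number mapping above surfaces to callers (connection parameters and the `users` table are placeholders):

    import pymysql

    conn = pymysql.connect(host="localhost", user="user", password="passwd", db="test")
    try:
        with conn.cursor() as cur:
            cur.execute("INSERT INTO users (email) VALUES (%s)", ("dup@example.com",))
        conn.commit()
    except pymysql.err.IntegrityError as e:
        errno, message = e.args      # e.g. ER.DUP_ENTRY (1062) maps to IntegrityError
        print("integrity error:", errno, message)
    except pymysql.err.OperationalError as e:
        print("operational error:", e.args)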

+ 18 - 0
ambt-preden-dailymigration-dev/package/pymysql/optionfile.py

@@ -0,0 +1,18 @@
+import configparser
+
+
+class Parser(configparser.RawConfigParser):
+    def __init__(self, **kwargs):
+        kwargs["allow_no_value"] = True
+        configparser.RawConfigParser.__init__(self, **kwargs)
+
+    def __remove_quotes(self, value):
+        quotes = ["'", '"']
+        for quote in quotes:
+            if len(value) >= 2 and value[0] == value[-1] == quote:
+                return value[1:-1]
+        return value
+
+    def get(self, section, option):
+        value = configparser.RawConfigParser.get(self, section, option)
+        return self.__remove_quotes(value)
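
A minimal sketch of the quote-stripping behavior above (the config text is hypothetical):

    from pymysql.optionfile import Parser

    cnf = Parser()
    cnf.read_string("[client]\nuser = 'lambda_user'\npassword = \"s3cret\"\n")
    print(cnf.get("client", "user"))      # lambda_user
    print(cnf.get("client", "password"))  # s3cret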

+ 358 - 0
ambt-preden-dailymigration-dev/package/pymysql/protocol.py

@@ -0,0 +1,358 @@
+# Python implementation of low level MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+from .charset import MBLENGTH
+from .constants import FIELD_TYPE, SERVER_STATUS
+from . import err
+
+import struct
+import sys
+
+
+DEBUG = False
+
+NULL_COLUMN = 251
+UNSIGNED_CHAR_COLUMN = 251
+UNSIGNED_SHORT_COLUMN = 252
+UNSIGNED_INT24_COLUMN = 253
+UNSIGNED_INT64_COLUMN = 254
+
+
+def dump_packet(data):  # pragma: no cover
+    def printable(data):
+        if 32 <= data < 127:
+            return chr(data)
+        return "."
+
+    try:
+        print("packet length:", len(data))
+        for i in range(1, 7):
+            f = sys._getframe(i)
+            print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
+        print("-" * 66)
+    except ValueError:
+        pass
+    dump_data = [data[i : i + 16] for i in range(0, min(len(data), 256), 16)]
+    for d in dump_data:
+        print(
+            " ".join("{:02X}".format(x) for x in d)
+            + "   " * (16 - len(d))
+            + " " * 2
+            + "".join(printable(x) for x in d)
+        )
+    print("-" * 66)
+    print()
+
+
+class MysqlPacket:
+    """Representation of a MySQL response packet.
+
+    Provides an interface for reading/parsing the packet results.
+    """
+
+    __slots__ = ("_position", "_data")
+
+    def __init__(self, data, encoding):
+        self._position = 0
+        self._data = data
+
+    def get_all_data(self):
+        return self._data
+
+    def read(self, size):
+        """Read the first 'size' bytes in packet and advance cursor past them."""
+        result = self._data[self._position : (self._position + size)]
+        if len(result) != size:
+            error = (
+                "Result length not requested length:\n"
+                "Expected=%s.  Actual=%s.  Position: %s.  Data Length: %s"
+                % (size, len(result), self._position, len(self._data))
+            )
+            if DEBUG:
+                print(error)
+                self.dump()
+            raise AssertionError(error)
+        self._position += size
+        return result
+
+    def read_all(self):
+        """Read all remaining data in the packet.
+
+        (Subsequent read() will return errors.)
+        """
+        result = self._data[self._position :]
+        self._position = None  # ensure no subsequent read()
+        return result
+
+    def advance(self, length):
+        """Advance the cursor in data buffer 'length' bytes."""
+        new_position = self._position + length
+        if new_position < 0 or new_position > len(self._data):
+            raise Exception(
+                "Invalid advance amount (%s) for cursor.  "
+                "Position=%s" % (length, new_position)
+            )
+        self._position = new_position
+
+    def rewind(self, position=0):
+        """Set the position of the data buffer cursor to 'position'."""
+        if position < 0 or position > len(self._data):
+            raise Exception("Invalid position to rewind cursor to: %s." % position)
+        self._position = position
+
+    def get_bytes(self, position, length=1):
+        """Get 'length' bytes starting at 'position'.
+
+        Position is start of payload (first four packet header bytes are not
+        included) starting at index '0'.
+
+        No error checking is done.  If requesting outside end of buffer
+        an empty string (or string shorter than 'length') may be returned!
+        """
+        return self._data[position : (position + length)]
+
+    def read_uint8(self):
+        result = self._data[self._position]
+        self._position += 1
+        return result
+
+    def read_uint16(self):
+        result = struct.unpack_from("<H", self._data, self._position)[0]
+        self._position += 2
+        return result
+
+    def read_uint24(self):
+        low, high = struct.unpack_from("<HB", self._data, self._position)
+        self._position += 3
+        return low + (high << 16)
+
+    def read_uint32(self):
+        result = struct.unpack_from("<I", self._data, self._position)[0]
+        self._position += 4
+        return result
+
+    def read_uint64(self):
+        result = struct.unpack_from("<Q", self._data, self._position)[0]
+        self._position += 8
+        return result
+
+    def read_string(self):
+        end_pos = self._data.find(b"\0", self._position)
+        if end_pos < 0:
+            return None
+        result = self._data[self._position : end_pos]
+        self._position = end_pos + 1
+        return result
+
+    def read_length_encoded_integer(self):
+        """Read a 'Length Coded Binary' number from the data buffer.
+
+        Length coded numbers can be anywhere from 1 to 9 bytes depending
+        on the value of the first byte.
+        """
+        c = self.read_uint8()
+        if c == NULL_COLUMN:
+            return None
+        if c < UNSIGNED_CHAR_COLUMN:
+            return c
+        elif c == UNSIGNED_SHORT_COLUMN:
+            return self.read_uint16()
+        elif c == UNSIGNED_INT24_COLUMN:
+            return self.read_uint24()
+        elif c == UNSIGNED_INT64_COLUMN:
+            return self.read_uint64()
+
+    def read_length_coded_string(self):
+        """Read a 'Length Coded String' from the data buffer.
+
+        A 'Length Coded String' consists first of a length coded
+        (unsigned, positive) integer represented in 1-9 bytes followed by
+        that many bytes of binary data.  (For example "cat" would be "3cat".)
+        """
+        length = self.read_length_encoded_integer()
+        if length is None:
+            return None
+        return self.read(length)
+
+    def read_struct(self, fmt):
+        s = struct.Struct(fmt)
+        result = s.unpack_from(self._data, self._position)
+        self._position += s.size
+        return result
+
+    def is_ok_packet(self):
+        # https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
+        return self._data[0] == 0 and len(self._data) >= 7
+
+    def is_eof_packet(self):
+        # http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
+        # Caution: \xFE may be LengthEncodedInteger.
+        # If \xFE is LengthEncodedInteger header, 8bytes followed.
+        return self._data[0] == 0xFE and len(self._data) < 9
+
+    def is_auth_switch_request(self):
+        # http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+        return self._data[0] == 0xFE
+
+    def is_extra_auth_data(self):
+        # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+        return self._data[0] == 1
+
+    def is_resultset_packet(self):
+        field_count = self._data[0]
+        return 1 <= field_count <= 250
+
+    def is_load_local_packet(self):
+        return self._data[0] == 0xFB
+
+    def is_error_packet(self):
+        return self._data[0] == 0xFF
+
+    def check_error(self):
+        if self.is_error_packet():
+            self.raise_for_error()
+
+    def raise_for_error(self):
+        self.rewind()
+        self.advance(1)  # field_count == error (we already know that)
+        errno = self.read_uint16()
+        if DEBUG:
+            print("errno =", errno)
+        err.raise_mysql_exception(self._data)
+
+    def dump(self):
+        dump_packet(self._data)
+
+
+class FieldDescriptorPacket(MysqlPacket):
+    """A MysqlPacket that represents a specific column's metadata in the result.
+
+    Parsing is automatically done and the results are exported via public
+    attributes on the class such as: db, table_name, name, length, type_code.
+    """
+
+    def __init__(self, data, encoding):
+        MysqlPacket.__init__(self, data, encoding)
+        self._parse_field_descriptor(encoding)
+
+    def _parse_field_descriptor(self, encoding):
+        """Parse the 'Field Descriptor' (Metadata) packet.
+
+        This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
+        """
+        self.catalog = self.read_length_coded_string()
+        self.db = self.read_length_coded_string()
+        self.table_name = self.read_length_coded_string().decode(encoding)
+        self.org_table = self.read_length_coded_string().decode(encoding)
+        self.name = self.read_length_coded_string().decode(encoding)
+        self.org_name = self.read_length_coded_string().decode(encoding)
+        (
+            self.charsetnr,
+            self.length,
+            self.type_code,
+            self.flags,
+            self.scale,
+        ) = self.read_struct("<xHIBHBxx")
+        # 'default' is a length coded binary and is still in the buffer?
+        # not used for normal result sets...
+
+    def description(self):
+        """Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
+        return (
+            self.name,
+            self.type_code,
+            None,  # TODO: display_length; should this be self.length?
+            self.get_column_length(),  # 'internal_size'
+            self.get_column_length(),  # 'precision'  # TODO: why!?!?
+            self.scale,
+            self.flags % 2 == 0,
+        )
+
+    def get_column_length(self):
+        if self.type_code == FIELD_TYPE.VAR_STRING:
+            mblen = MBLENGTH.get(self.charsetnr, 1)
+            return self.length // mblen
+        return self.length
+
+    def __str__(self):
+        return "%s %r.%r.%r, type=%s, flags=%x" % (
+            self.__class__,
+            self.db,
+            self.table_name,
+            self.name,
+            self.type_code,
+            self.flags,
+        )
+
+
+class OKPacketWrapper:
+    """
+    OK Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_ok_packet():
+            raise ValueError(
+                "Cannot create "
+                + str(self.__class__.__name__)
+                + " object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.packet.advance(1)
+
+        self.affected_rows = self.packet.read_length_encoded_integer()
+        self.insert_id = self.packet.read_length_encoded_integer()
+        self.server_status, self.warning_count = self.read_struct("<HH")
+        self.message = self.packet.read_all()
+        self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)
+
+
+class EOFPacketWrapper:
+    """
+    EOF Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_eof_packet():
+            raise ValueError(
+                f"Cannot create '{self.__class__}' object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.warning_count, self.server_status = self.packet.read_struct("<xhh")
+        if DEBUG:
+            print("server_status=", self.server_status)
+        self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)
+
+
+class LoadLocalPacketWrapper:
+    """
+    Load Local Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_load_local_packet():
+            raise ValueError(
+                f"Cannot create '{self.__class__}' object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.filename = self.packet.get_all_data()[1:]
+        if DEBUG:
+            print("filename=", self.filename)
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)
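
A minimal sketch of the length-encoded integer format handled above (internal API; the payload bytes are hand-built for illustration):

    from pymysql.protocol import MysqlPacket

    # 0xFB encodes NULL, values below 0xFB are literal, and 0xFC means a 2-byte
    # little-endian integer follows.
    pkt = MysqlPacket(bytes([0xFC, 0x2A, 0x01]), "utf-8")
    print(pkt.read_length_encoded_integer())   # 0x012A == 298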

+ 20 - 0
ambt-preden-dailymigration-dev/package/pymysql/times.py

@@ -0,0 +1,20 @@
+from time import localtime
+from datetime import date, datetime, time, timedelta
+
+
+Date = date
+Time = time
+TimeDelta = timedelta
+Timestamp = datetime
+
+
+def DateFromTicks(ticks):
+    return date(*localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+    return time(*localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+    return datetime(*localtime(ticks)[:6])

+ 99 - 0
ambt-preden-monthlymigration-dev/lambda_function.py

@@ -0,0 +1,99 @@
+import sys
+import pymysql
+import json
+import decimal
+import boto3
+import uuid
+import logging
+import os
+import time
+from datetime import datetime, timedelta
+from botocore.exceptions import ClientError
+
+class DecimalnDateTimeEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, decimal.Decimal) or isinstance(obj, datetime):
+            return str(obj)
+        return json.JSONEncoder.default(self, obj)
+
+def get_secret():
+    secret_name = "ambt-preden-gurigalmae-valleydb"
+    region_name = "ap-northeast-2"
+
+    # Create a Secrets Manager client
+    session = boto3.session.Session()
+    client = session.client(
+        service_name='secretsmanager',
+        region_name=region_name
+    )
+
+    # In this sample we only handle the specific exceptions for the 'GetSecretValue' API.
+    # See https://docs.aws.amazon.com/secretsmanager/latest/apireference/API_GetSecretValue.html
+    # We rethrow the exception by default.
+
+    try:
+        get_secret_value_response = client.get_secret_value(
+            SecretId=secret_name
+        )
+    except ClientError as e:
+        if e.response['Error']['Code'] == 'DecryptionFailureException':
+            # Secrets Manager can't decrypt the protected secret text using the provided KMS key.
+            # Deal with the exception here, and/or rethrow at your discretion.
+            raise e
+        elif e.response['Error']['Code'] == 'InternalServiceErrorException':
+            # An error occurred on the server side.
+            # Deal with the exception here, and/or rethrow at your discretion.
+            raise e
+        elif e.response['Error']['Code'] == 'InvalidParameterException':
+            # You provided an invalid value for a parameter.
+            # Deal with the exception here, and/or rethrow at your discretion.
+            raise e
+        elif e.response['Error']['Code'] == 'InvalidRequestException':
+            # You provided a parameter value that is not valid for the current state of the resource.
+            # Deal with the exception here, and/or rethrow at your discretion.
+            raise e
+        elif e.response['Error']['Code'] == 'ResourceNotFoundException':
+            # We can't find the resource that you asked for.
+            # Deal with the exception here, and/or rethrow at your discretion.
+            raise e
+    else:
+        # Decrypts secret using the associated KMS CMK.
+        # Depending on whether the secret is a string or binary, one of these fields will be populated.
+        if 'SecretString' in get_secret_value_response:
+            secret = get_secret_value_response['SecretString']
+        return json.loads(secret)
+        
+
+def lambda_handler(event, context):
+    os.environ['TZ'] = 'Asia/Seoul'
+    time.tzset()
+    
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO)
+    
+    secret = get_secret()
+    try:
+        conn = pymysql.connect(host=secret['host'], port=int(secret['port']), user=secret['username'], passwd=secret['password'], db=secret['dbname'], connect_timeout=5)
+    except Exception as e:
+        logger.error("ERROR: Unexpected error: Could not connect to MySQL instance. %s", e)
+        sys.exit()
+
+    logger.info("SUCCESS: Connection to RDS mysql instance succeeded")
+    
+    today = datetime.now()
+    yesterday = today - timedelta(days=1)
+    
+    sql = "SELECT * FROM MONTHENERGY WHERE ENERGY_YEAR={} AND ENERGY_MONTH={}".format(yesterday.year, yesterday.month)
+    with conn.cursor(pymysql.cursors.DictCursor) as cur:
+        cur.execute(sql)
+        logger.info(sql)
+        rows = cur.fetchall()
+
+    data = ""
+    for row in rows:
+        data += json.dumps(row, cls=DecimalnDateTimeEncoder) + '\n'
+        logger.info(row)
+
+    s3 = boto3.resource('s3')
+    s3_object = s3.Object('hdci-ambt-homenetserver-raw', 'dev/site_name=gurigalmae/table_name=monthenergy/year={}/ambt-preden-lambda-migration-dev-{}-{}-{}'.format(yesterday.year, yesterday.year, yesterday.month, str(uuid.uuid4())))
+    return s3_object.put(Body=data)
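
For reference, a minimal sketch of the newline-delimited JSON this handler uploads; the column names are illustrative, not the actual MONTHENERGY schema:

    import json
    import decimal
    from datetime import datetime

    # Assumes this runs next to the handler module above so its encoder is importable.
    from lambda_function import DecimalnDateTimeEncoder

    row = {"ENERGY_YEAR": 2022, "ENERGY_MONTH": 5,
           "USAGE_KWH": decimal.Decimal("12.34"),
           "UPDATED_AT": datetime(2022, 5, 31, 23, 59, 0)}
    print(json.dumps(row, cls=DecimalnDateTimeEncoder))
    # {"ENERGY_YEAR": 2022, "ENERGY_MONTH": 5, "USAGE_KWH": "12.34", "UPDATED_AT": "2022-05-31 23:59:00"}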

BIN
ambt-preden-monthlymigration-dev/my-deployment-package.zip


+ 1 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/INSTALLER

@@ -0,0 +1 @@
+pip

+ 19 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/LICENSE

@@ -0,0 +1,19 @@
+Copyright (c) 2010, 2013 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.

+ 180 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/METADATA

@@ -0,0 +1,180 @@
+Metadata-Version: 2.1
+Name: PyMySQL
+Version: 1.0.2
+Summary: Pure Python MySQL Driver
+Home-page: https://github.com/PyMySQL/PyMySQL/
+Author: yutaka.matsubara
+Author-email: yutaka.matsubara@gmail.com
+Maintainer: Inada Naoki
+Maintainer-email: songofacandy@gmail.com
+License: "MIT"
+Project-URL: Documentation, https://pymysql.readthedocs.io/
+Keywords: MySQL
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.6
+Classifier: Programming Language :: Python :: 3.7
+Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3.9
+Classifier: Programming Language :: Python :: Implementation :: CPython
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Database
+Requires-Python: >=3.6
+Provides-Extra: ed25519
+Requires-Dist: PyNaCl (>=1.4.0) ; extra == 'ed25519'
+Provides-Extra: rsa
+Requires-Dist: cryptography ; extra == 'rsa'
+
+.. image:: https://readthedocs.org/projects/pymysql/badge/?version=latest
+    :target: https://pymysql.readthedocs.io/
+    :alt: Documentation Status
+
+.. image:: https://coveralls.io/repos/PyMySQL/PyMySQL/badge.svg?branch=master&service=github
+    :target: https://coveralls.io/github/PyMySQL/PyMySQL?branch=master
+
+.. image:: https://img.shields.io/lgtm/grade/python/g/PyMySQL/PyMySQL.svg?logo=lgtm&logoWidth=18
+    :target: https://lgtm.com/projects/g/PyMySQL/PyMySQL/context:python
+
+
+PyMySQL
+=======
+
+.. contents:: Table of Contents
+   :local:
+
+This package contains a pure-Python MySQL client library, based on `PEP 249`_.
+
+Most public APIs are compatible with mysqlclient and MySQLdb.
+
+NOTE: PyMySQL doesn't support low level APIs `_mysql` provides like `data_seek`,
+`store_result`, and `use_result`. You should use high level APIs defined in `PEP 249`_.
+But some APIs like `autocommit` and `ping` are supported because `PEP 249`_ doesn't cover
+their usecase.
+
+.. _`PEP 249`: https://www.python.org/dev/peps/pep-0249/
+
+
+Requirements
+-------------
+
+* Python -- one of the following:
+
+  - CPython_ : 3.6 and newer
+  - PyPy_ : Latest 3.x version
+
+* MySQL Server -- one of the following:
+
+  - MySQL_ >= 5.6
+  - MariaDB_ >= 10.0
+
+.. _CPython: https://www.python.org/
+.. _PyPy: https://pypy.org/
+.. _MySQL: https://www.mysql.com/
+.. _MariaDB: https://mariadb.org/
+
+
+Installation
+------------
+
+Package is uploaded on `PyPI <https://pypi.org/project/PyMySQL>`_.
+
+You can install it with pip::
+
+    $ python3 -m pip install PyMySQL
+
+To use "sha256_password" or "caching_sha2_password" for authenticate,
+you need to install additional dependency::
+
+   $ python3 -m pip install PyMySQL[rsa]
+
+To use MariaDB's "ed25519" authentication method, you need to install
+additional dependency::
+
+   $ python3 -m pip install PyMySQL[ed25519]
+
+
+Documentation
+-------------
+
+Documentation is available online: https://pymysql.readthedocs.io/
+
+For support, please refer to the `StackOverflow
+<https://stackoverflow.com/questions/tagged/pymysql>`_.
+
+
+Example
+-------
+
+The following examples make use of a simple table
+
+.. code:: sql
+
+   CREATE TABLE `users` (
+       `id` int(11) NOT NULL AUTO_INCREMENT,
+       `email` varchar(255) COLLATE utf8_bin NOT NULL,
+       `password` varchar(255) COLLATE utf8_bin NOT NULL,
+       PRIMARY KEY (`id`)
+   ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin
+   AUTO_INCREMENT=1 ;
+
+
+.. code:: python
+
+    import pymysql.cursors
+
+    # Connect to the database
+    connection = pymysql.connect(host='localhost',
+                                 user='user',
+                                 password='passwd',
+                                 database='db',
+                                 cursorclass=pymysql.cursors.DictCursor)
+
+    with connection:
+        with connection.cursor() as cursor:
+            # Create a new record
+            sql = "INSERT INTO `users` (`email`, `password`) VALUES (%s, %s)"
+            cursor.execute(sql, ('webmaster@python.org', 'very-secret'))
+
+        # connection is not autocommit by default. So you must commit to save
+        # your changes.
+        connection.commit()
+
+        with connection.cursor() as cursor:
+            # Read a single record
+            sql = "SELECT `id`, `password` FROM `users` WHERE `email`=%s"
+            cursor.execute(sql, ('webmaster@python.org',))
+            result = cursor.fetchone()
+            print(result)
+
+
+This example will print:
+
+.. code:: python
+
+    {'password': 'very-secret', 'id': 1}
+
+
+Resources
+---------
+
+* DB-API 2.0: https://www.python.org/dev/peps/pep-0249/
+
+* MySQL Reference Manuals: https://dev.mysql.com/doc/
+
+* MySQL client/server protocol:
+  https://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+* "Connector" channel in MySQL Community Slack:
+  https://lefred.be/mysql-community-on-slack/
+
+* PyMySQL mailing list: https://groups.google.com/forum/#!forum/pymysql-users
+
+License
+-------
+
+PyMySQL is released under the MIT License. See LICENSE for more information.
+
+

+ 43 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/RECORD

@@ -0,0 +1,43 @@
+PyMySQL-1.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+PyMySQL-1.0.2.dist-info/LICENSE,sha256=MUEg3GXwgA9ziksxQAx27hTezR--d86cNUCkIbhup7Y,1070
+PyMySQL-1.0.2.dist-info/METADATA,sha256=hz4Fdo8sOFKcNqZ8wp4Bp-txNCOBCnw9-leYR7QBZ5I,5119
+PyMySQL-1.0.2.dist-info/RECORD,,
+PyMySQL-1.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+PyMySQL-1.0.2.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92
+PyMySQL-1.0.2.dist-info/top_level.txt,sha256=IKlV-f4o90sOdnMd6HBvo0l2nqfJOGUzkwZeaEEGuRg,8
+pymysql/__init__.py,sha256=XL7skPUK4cbKiek68T0vMob-L4YkIRLb2KX4hdMZVvM,4391
+pymysql/__pycache__/__init__.cpython-38.pyc,,
+pymysql/__pycache__/_auth.cpython-38.pyc,,
+pymysql/__pycache__/charset.cpython-38.pyc,,
+pymysql/__pycache__/connections.cpython-38.pyc,,
+pymysql/__pycache__/converters.cpython-38.pyc,,
+pymysql/__pycache__/cursors.cpython-38.pyc,,
+pymysql/__pycache__/err.cpython-38.pyc,,
+pymysql/__pycache__/optionfile.cpython-38.pyc,,
+pymysql/__pycache__/protocol.cpython-38.pyc,,
+pymysql/__pycache__/times.cpython-38.pyc,,
+pymysql/_auth.py,sha256=l1VtBwDpCtTkalgYQFASO-rj-vEd3DGYR8g-eQjNF1U,7399
+pymysql/charset.py,sha256=JCvshFnNf4vzkpXc6uPCyg07qGNfZaVZoxrFqzVlKFs,10293
+pymysql/connections.py,sha256=EwKWqFIWlx6kbOeDFIhMFpjJ9-pyF140E5ouKgrrYfY,51251
+pymysql/constants/CLIENT.py,sha256=SSvMFPZCTVMU1UWa4zOrfhYMDdR2wG2mS0E5GzJhDsg,878
+pymysql/constants/COMMAND.py,sha256=TGITAUcNWlq2Gwg2wv5UK2ykdTd4LYTk_EcJJOCpGIc,679
+pymysql/constants/CR.py,sha256=oHyD9dnR1DUX7hd42rcamMnFrWhjUZz7E4S6qQWSQb4,1927
+pymysql/constants/ER.py,sha256=cH5wgU-e70wd0uSygNR5IFCnnXcrR9WLwJPMH22bhUw,12296
+pymysql/constants/FIELD_TYPE.py,sha256=ytFzgAnGmb9hvdsBlnK68qdZv_a6jYFIXT6VSAb60z8,370
+pymysql/constants/FLAG.py,sha256=Fy-PrCLnUI7fx_o5WypYnUAzWAM0E9d5yL8fFRVKffY,214
+pymysql/constants/SERVER_STATUS.py,sha256=m28Iq5JGCFCWLhafE73-iOvw_9gDGqnytW3NkHpbugA,333
+pymysql/constants/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pymysql/constants/__pycache__/CLIENT.cpython-38.pyc,,
+pymysql/constants/__pycache__/COMMAND.cpython-38.pyc,,
+pymysql/constants/__pycache__/CR.cpython-38.pyc,,
+pymysql/constants/__pycache__/ER.cpython-38.pyc,,
+pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc,,
+pymysql/constants/__pycache__/FLAG.cpython-38.pyc,,
+pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc,,
+pymysql/constants/__pycache__/__init__.cpython-38.pyc,,
+pymysql/converters.py,sha256=MBXTOCXSyewMculaRliBEzPVkOKXLiRMqvIXih9Akrg,9430
+pymysql/cursors.py,sha256=1E79f3vysxygyfZMhvR6-yFDfysRn3Go8xZTywteh4o,15366
+pymysql/err.py,sha256=bpxayM4IUnFQAd8bUZ3PFsFomi9QSfBk-0TJXyKU2FI,3773
+pymysql/optionfile.py,sha256=ehPrZW4d7pcEvXGAEpsKgLdXpFnIQD93yF7T_jHjoRk,573
+pymysql/protocol.py,sha256=Ur8xXkVvyFc6m5CA34QrHBasADvS_NPFsWU-Q3flRYA,11859
+pymysql/times.py,sha256=_qXgDaYwsHntvpIKSKXp1rrYIgtq6Z9pLyLnO2XNoL0,360

+ 0 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/REQUESTED


+ 5 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.36.2)
+Root-Is-Purelib: true
+Tag: py3-none-any
+

+ 1 - 0
ambt-preden-monthlymigration-dev/package/PyMySQL-1.0.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+pymysql

+ 185 - 0
ambt-preden-monthlymigration-dev/package/pymysql/__init__.py

@@ -0,0 +1,185 @@
+"""
+PyMySQL: A pure-Python MySQL client library.
+
+Copyright (c) 2010-2016 PyMySQL contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
+"""
+import sys
+
+from .constants import FIELD_TYPE
+from .err import (
+    Warning,
+    Error,
+    InterfaceError,
+    DataError,
+    DatabaseError,
+    OperationalError,
+    IntegrityError,
+    InternalError,
+    NotSupportedError,
+    ProgrammingError,
+    MySQLError,
+)
+from .times import (
+    Date,
+    Time,
+    Timestamp,
+    DateFromTicks,
+    TimeFromTicks,
+    TimestampFromTicks,
+)
+
+
+VERSION = (1, 0, 2, None)
+if VERSION[3] is not None:
+    VERSION_STRING = "%d.%d.%d_%s" % VERSION
+else:
+    VERSION_STRING = "%d.%d.%d" % VERSION[:3]
+threadsafety = 1
+apilevel = "2.0"
+paramstyle = "pyformat"
+
+from . import connections  # noqa: E402
+
+
+class DBAPISet(frozenset):
+    def __ne__(self, other):
+        if isinstance(other, set):
+            return frozenset.__ne__(self, other)
+        else:
+            return other not in self
+
+    def __eq__(self, other):
+        if isinstance(other, frozenset):
+            return frozenset.__eq__(self, other)
+        else:
+            return other in self
+
+    def __hash__(self):
+        return frozenset.__hash__(self)
+
+
+STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
+BINARY = DBAPISet(
+    [
+        FIELD_TYPE.BLOB,
+        FIELD_TYPE.LONG_BLOB,
+        FIELD_TYPE.MEDIUM_BLOB,
+        FIELD_TYPE.TINY_BLOB,
+    ]
+)
+NUMBER = DBAPISet(
+    [
+        FIELD_TYPE.DECIMAL,
+        FIELD_TYPE.DOUBLE,
+        FIELD_TYPE.FLOAT,
+        FIELD_TYPE.INT24,
+        FIELD_TYPE.LONG,
+        FIELD_TYPE.LONGLONG,
+        FIELD_TYPE.TINY,
+        FIELD_TYPE.YEAR,
+    ]
+)
+DATE = DBAPISet([FIELD_TYPE.DATE, FIELD_TYPE.NEWDATE])
+TIME = DBAPISet([FIELD_TYPE.TIME])
+TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
+DATETIME = TIMESTAMP
+ROWID = DBAPISet()
+
+
+def Binary(x):
+    """Return x as a binary type."""
+    return bytes(x)
+
+
+Connect = connect = Connection = connections.Connection
+
+
+def get_client_info():  # for MySQLdb compatibility
+    version = VERSION
+    if VERSION[3] is None:
+        version = VERSION[:3]
+    return ".".join(map(str, version))
+
+
+# we include a doctored version_info here for MySQLdb compatibility
+version_info = (1, 4, 0, "final", 0)
+
+NULL = "NULL"
+
+__version__ = get_client_info()
+
+
+def thread_safe():
+    return True  # match MySQLdb.thread_safe()
+
+
+def install_as_MySQLdb():
+    """
+    After this function is called, any application that imports MySQLdb or
+    _mysql will unwittingly actually use pymysql.
+    """
+    sys.modules["MySQLdb"] = sys.modules["_mysql"] = sys.modules["pymysql"]
+
+
+__all__ = [
+    "BINARY",
+    "Binary",
+    "Connect",
+    "Connection",
+    "DATE",
+    "Date",
+    "Time",
+    "Timestamp",
+    "DateFromTicks",
+    "TimeFromTicks",
+    "TimestampFromTicks",
+    "DataError",
+    "DatabaseError",
+    "Error",
+    "FIELD_TYPE",
+    "IntegrityError",
+    "InterfaceError",
+    "InternalError",
+    "MySQLError",
+    "NULL",
+    "NUMBER",
+    "NotSupportedError",
+    "DBAPISet",
+    "OperationalError",
+    "ProgrammingError",
+    "ROWID",
+    "STRING",
+    "TIME",
+    "TIMESTAMP",
+    "Warning",
+    "apilevel",
+    "connect",
+    "connections",
+    "constants",
+    "converters",
+    "cursors",
+    "get_client_info",
+    "paramstyle",
+    "threadsafety",
+    "version_info",
+    "install_as_MySQLdb",
+    "__version__",
+]

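The `install_as_MySQLdb()` helper at the end of `pymysql/__init__.py` registers
pymysql under the `MySQLdb` and `_mysql` module names. A minimal sketch of how an
application written against MySQLdb could be pointed at pymysql (connection
parameters are placeholders):

.. code:: python

    import pymysql
    pymysql.install_as_MySQLdb()

    import MySQLdb  # now resolves to the pymysql module
    conn = MySQLdb.connect(host="localhost", user="user", password="passwd")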
BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/__init__.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/_auth.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/charset.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/connections.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/converters.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/cursors.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/err.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/optionfile.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/protocol.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/__pycache__/times.cpython-38.pyc


+ 266 - 0
ambt-preden-monthlymigration-dev/package/pymysql/_auth.py

@@ -0,0 +1,266 @@
+"""
+Implements auth methods
+"""
+from .err import OperationalError
+
+
+try:
+    from cryptography.hazmat.backends import default_backend
+    from cryptography.hazmat.primitives import serialization, hashes
+    from cryptography.hazmat.primitives.asymmetric import padding
+
+    _have_cryptography = True
+except ImportError:
+    _have_cryptography = False
+
+from functools import partial
+import hashlib
+
+
+DEBUG = False
+SCRAMBLE_LENGTH = 20
+sha1_new = partial(hashlib.new, "sha1")
+
+
+# mysql_native_password
+# https://dev.mysql.com/doc/internals/en/secure-password-authentication.html#packet-Authentication::Native41
+
+
+def scramble_native_password(password, message):
+    """Scramble used for mysql_native_password"""
+    if not password:
+        return b""
+
+    stage1 = sha1_new(password).digest()
+    stage2 = sha1_new(stage1).digest()
+    s = sha1_new()
+    s.update(message[:SCRAMBLE_LENGTH])
+    s.update(stage2)
+    result = s.digest()
+    return _my_crypt(result, stage1)
+
+
+def _my_crypt(message1, message2):
+    result = bytearray(message1)
+
+    for i in range(len(result)):
+        result[i] ^= message2[i]
+
+    return bytes(result)
+
+
+# MariaDB's client_ed25519-plugin
+# https://mariadb.com/kb/en/library/connection/#client_ed25519-plugin
+
+_nacl_bindings = False
+
+
+def _init_nacl():
+    global _nacl_bindings
+    try:
+        from nacl import bindings
+
+        _nacl_bindings = bindings
+    except ImportError:
+        raise RuntimeError(
+            "'pynacl' package is required for ed25519_password auth method"
+        )
+
+
+def _scalar_clamp(s32):
+    ba = bytearray(s32)
+    ba0 = bytes(bytearray([ba[0] & 248]))
+    ba31 = bytes(bytearray([(ba[31] & 127) | 64]))
+    return ba0 + bytes(s32[1:31]) + ba31
+
+
+def ed25519_password(password, scramble):
+    """Sign a random scramble with elliptic curve Ed25519.
+
+    Secret and public key are derived from password.
+    """
+    # variable names based on rfc8032 section-5.1.6
+    #
+    if not _nacl_bindings:
+        _init_nacl()
+
+    # h = SHA512(password)
+    h = hashlib.sha512(password).digest()
+
+    # s = prune(first_half(h))
+    s = _scalar_clamp(h[:32])
+
+    # r = SHA512(second_half(h) || M)
+    r = hashlib.sha512(h[32:] + scramble).digest()
+
+    # R = encoded point [r]B
+    r = _nacl_bindings.crypto_core_ed25519_scalar_reduce(r)
+    R = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(r)
+
+    # A = encoded point [s]B
+    A = _nacl_bindings.crypto_scalarmult_ed25519_base_noclamp(s)
+
+    # k = SHA512(R || A || M)
+    k = hashlib.sha512(R + A + scramble).digest()
+
+    # S = (k * s + r) mod L
+    k = _nacl_bindings.crypto_core_ed25519_scalar_reduce(k)
+    ks = _nacl_bindings.crypto_core_ed25519_scalar_mul(k, s)
+    S = _nacl_bindings.crypto_core_ed25519_scalar_add(ks, r)
+
+    # signature = R || S
+    return R + S
+
+
+# sha256_password
+
+
+def _roundtrip(conn, send_data):
+    conn.write_packet(send_data)
+    pkt = conn._read_packet()
+    pkt.check_error()
+    return pkt
+
+
+def _xor_password(password, salt):
+    # Trailing NUL character will be added in Auth Switch Request.
+    # See https://github.com/mysql/mysql-server/blob/7d10c82196c8e45554f27c00681474a9fb86d137/sql/auth/sha2_password.cc#L939-L945
+    salt = salt[:SCRAMBLE_LENGTH]
+    password_bytes = bytearray(password)
+    # salt = bytearray(salt)  # for PY2 compat.
+    salt_len = len(salt)
+    for i in range(len(password_bytes)):
+        password_bytes[i] ^= salt[i % salt_len]
+    return bytes(password_bytes)
+
+
+def sha2_rsa_encrypt(password, salt, public_key):
+    """Encrypt password with salt and public_key.
+
+    Used for sha256_password and caching_sha2_password.
+    """
+    if not _have_cryptography:
+        raise RuntimeError(
+            "'cryptography' package is required for sha256_password or caching_sha2_password auth methods"
+        )
+    message = _xor_password(password + b"\0", salt)
+    rsa_key = serialization.load_pem_public_key(public_key, default_backend())
+    return rsa_key.encrypt(
+        message,
+        padding.OAEP(
+            mgf=padding.MGF1(algorithm=hashes.SHA1()),
+            algorithm=hashes.SHA1(),
+            label=None,
+        ),
+    )
+
+
+def sha256_password_auth(conn, pkt):
+    if conn._secure:
+        if DEBUG:
+            print("sha256: Sending plain password")
+        data = conn.password + b"\0"
+        return _roundtrip(conn, data)
+
+    if pkt.is_auth_switch_request():
+        conn.salt = pkt.read_all()
+        if not conn.server_public_key and conn.password:
+            # Request server public key
+            if DEBUG:
+                print("sha256: Requesting server public key")
+            pkt = _roundtrip(conn, b"\1")
+
+    if pkt.is_extra_auth_data():
+        conn.server_public_key = pkt._data[1:]
+        if DEBUG:
+            print("Received public key:\n", conn.server_public_key.decode("ascii"))
+
+    if conn.password:
+        if not conn.server_public_key:
+            raise OperationalError("Couldn't receive server's public key")
+
+        data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+    else:
+        data = b""
+
+    return _roundtrip(conn, data)
+
+
+def scramble_caching_sha2(password, nonce):
+    # (bytes, bytes) -> bytes
+    """Scramble algorithm used in cached_sha2_password fast path.
+
+    XOR(SHA256(password), SHA256(SHA256(SHA256(password)), nonce))
+    """
+    if not password:
+        return b""
+
+    p1 = hashlib.sha256(password).digest()
+    p2 = hashlib.sha256(p1).digest()
+    p3 = hashlib.sha256(p2 + nonce).digest()
+
+    res = bytearray(p1)
+    for i in range(len(p3)):
+        res[i] ^= p3[i]
+
+    return bytes(res)
+
+
+def caching_sha2_password_auth(conn, pkt):
+    # No password fast path
+    if not conn.password:
+        return _roundtrip(conn, b"")
+
+    if pkt.is_auth_switch_request():
+        # Try from fast auth
+        if DEBUG:
+            print("caching sha2: Trying fast path")
+        conn.salt = pkt.read_all()
+        scrambled = scramble_caching_sha2(conn.password, conn.salt)
+        pkt = _roundtrip(conn, scrambled)
+    # else: fast auth is tried in initial handshake
+
+    if not pkt.is_extra_auth_data():
+        raise OperationalError(
+            "caching sha2: Unknown packet for fast auth: %s" % pkt._data[:1]
+        )
+
+    # magic numbers:
+    # 2 - request public key
+    # 3 - fast auth succeeded
+    # 4 - need full auth
+
+    pkt.advance(1)
+    n = pkt.read_uint8()
+
+    if n == 3:
+        if DEBUG:
+            print("caching sha2: succeeded by fast path.")
+        pkt = conn._read_packet()
+        pkt.check_error()  # pkt must be OK packet
+        return pkt
+
+    if n != 4:
+        raise OperationalError("caching sha2: Unknown result for fast auth: %s" % n)
+
+    if DEBUG:
+        print("caching sha2: Trying full auth...")
+
+    if conn._secure:
+        if DEBUG:
+            print("caching sha2: Sending plain password via secure connection")
+        return _roundtrip(conn, conn.password + b"\0")
+
+    if not conn.server_public_key:
+        pkt = _roundtrip(conn, b"\x02")  # Request public key
+        if not pkt.is_extra_auth_data():
+            raise OperationalError(
+                "caching sha2: Unknown packet for public key: %s" % pkt._data[:1]
+            )
+
+        conn.server_public_key = pkt._data[1:]
+        if DEBUG:
+            print(conn.server_public_key.decode("ascii"))
+
+    data = sha2_rsa_encrypt(conn.password, conn.salt, conn.server_public_key)
+    pkt = _roundtrip(conn, data)

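The caching_sha2_password fast path in `_auth.py` above is plain hashlib
arithmetic: XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce)).
A standalone sketch with made-up inputs, mirroring `scramble_caching_sha2()`:

.. code:: python

    import hashlib

    def scramble(password: bytes, nonce: bytes) -> bytes:
        # Same steps as scramble_caching_sha2() above
        p1 = hashlib.sha256(password).digest()
        p2 = hashlib.sha256(p1).digest()
        p3 = hashlib.sha256(p2 + nonce).digest()
        return bytes(a ^ b for a, b in zip(p1, p3))

    print(scramble(b"very-secret", b"0123456789abcdefghij").hex())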
+ 209 - 0
ambt-preden-monthlymigration-dev/package/pymysql/charset.py

@@ -0,0 +1,209 @@
+MBLENGTH = {8: 1, 33: 3, 88: 2, 91: 2}
+
+
+class Charset:
+    def __init__(self, id, name, collation, is_default):
+        self.id, self.name, self.collation = id, name, collation
+        self.is_default = is_default == "Yes"
+
+    def __repr__(self):
+        return "Charset(id=%s, name=%r, collation=%r)" % (
+            self.id,
+            self.name,
+            self.collation,
+        )
+
+    @property
+    def encoding(self):
+        name = self.name
+        if name in ("utf8mb4", "utf8mb3"):
+            return "utf8"
+        if name == "latin1":
+            return "cp1252"
+        if name == "koi8r":
+            return "koi8_r"
+        if name == "koi8u":
+            return "koi8_u"
+        return name
+
+    @property
+    def is_binary(self):
+        return self.id == 63
+
+
+class Charsets:
+    def __init__(self):
+        self._by_id = {}
+        self._by_name = {}
+
+    def add(self, c):
+        self._by_id[c.id] = c
+        if c.is_default:
+            self._by_name[c.name] = c
+
+    def by_id(self, id):
+        return self._by_id[id]
+
+    def by_name(self, name):
+        return self._by_name.get(name.lower())
+
+
+_charsets = Charsets()
+"""
+Generated with:
+
+mysql -N -s -e "select id, character_set_name, collation_name, is_default
+from information_schema.collations order by id;" | python -c "import sys
+for l in sys.stdin.readlines():
+        id, name, collation, is_default  = l.split(chr(9))
+        print '_charsets.add(Charset(%s, \'%s\', \'%s\', \'%s\'))' \
+                % (id, name, collation, is_default.strip())
+"
+
+"""
+_charsets.add(Charset(1, "big5", "big5_chinese_ci", "Yes"))
+_charsets.add(Charset(2, "latin2", "latin2_czech_cs", ""))
+_charsets.add(Charset(3, "dec8", "dec8_swedish_ci", "Yes"))
+_charsets.add(Charset(4, "cp850", "cp850_general_ci", "Yes"))
+_charsets.add(Charset(5, "latin1", "latin1_german1_ci", ""))
+_charsets.add(Charset(6, "hp8", "hp8_english_ci", "Yes"))
+_charsets.add(Charset(7, "koi8r", "koi8r_general_ci", "Yes"))
+_charsets.add(Charset(8, "latin1", "latin1_swedish_ci", "Yes"))
+_charsets.add(Charset(9, "latin2", "latin2_general_ci", "Yes"))
+_charsets.add(Charset(10, "swe7", "swe7_swedish_ci", "Yes"))
+_charsets.add(Charset(11, "ascii", "ascii_general_ci", "Yes"))
+_charsets.add(Charset(12, "ujis", "ujis_japanese_ci", "Yes"))
+_charsets.add(Charset(13, "sjis", "sjis_japanese_ci", "Yes"))
+_charsets.add(Charset(14, "cp1251", "cp1251_bulgarian_ci", ""))
+_charsets.add(Charset(15, "latin1", "latin1_danish_ci", ""))
+_charsets.add(Charset(16, "hebrew", "hebrew_general_ci", "Yes"))
+_charsets.add(Charset(18, "tis620", "tis620_thai_ci", "Yes"))
+_charsets.add(Charset(19, "euckr", "euckr_korean_ci", "Yes"))
+_charsets.add(Charset(20, "latin7", "latin7_estonian_cs", ""))
+_charsets.add(Charset(21, "latin2", "latin2_hungarian_ci", ""))
+_charsets.add(Charset(22, "koi8u", "koi8u_general_ci", "Yes"))
+_charsets.add(Charset(23, "cp1251", "cp1251_ukrainian_ci", ""))
+_charsets.add(Charset(24, "gb2312", "gb2312_chinese_ci", "Yes"))
+_charsets.add(Charset(25, "greek", "greek_general_ci", "Yes"))
+_charsets.add(Charset(26, "cp1250", "cp1250_general_ci", "Yes"))
+_charsets.add(Charset(27, "latin2", "latin2_croatian_ci", ""))
+_charsets.add(Charset(28, "gbk", "gbk_chinese_ci", "Yes"))
+_charsets.add(Charset(29, "cp1257", "cp1257_lithuanian_ci", ""))
+_charsets.add(Charset(30, "latin5", "latin5_turkish_ci", "Yes"))
+_charsets.add(Charset(31, "latin1", "latin1_german2_ci", ""))
+_charsets.add(Charset(32, "armscii8", "armscii8_general_ci", "Yes"))
+_charsets.add(Charset(33, "utf8", "utf8_general_ci", "Yes"))
+_charsets.add(Charset(34, "cp1250", "cp1250_czech_cs", ""))
+_charsets.add(Charset(36, "cp866", "cp866_general_ci", "Yes"))
+_charsets.add(Charset(37, "keybcs2", "keybcs2_general_ci", "Yes"))
+_charsets.add(Charset(38, "macce", "macce_general_ci", "Yes"))
+_charsets.add(Charset(39, "macroman", "macroman_general_ci", "Yes"))
+_charsets.add(Charset(40, "cp852", "cp852_general_ci", "Yes"))
+_charsets.add(Charset(41, "latin7", "latin7_general_ci", "Yes"))
+_charsets.add(Charset(42, "latin7", "latin7_general_cs", ""))
+_charsets.add(Charset(43, "macce", "macce_bin", ""))
+_charsets.add(Charset(44, "cp1250", "cp1250_croatian_ci", ""))
+_charsets.add(Charset(45, "utf8mb4", "utf8mb4_general_ci", "Yes"))
+_charsets.add(Charset(46, "utf8mb4", "utf8mb4_bin", ""))
+_charsets.add(Charset(47, "latin1", "latin1_bin", ""))
+_charsets.add(Charset(48, "latin1", "latin1_general_ci", ""))
+_charsets.add(Charset(49, "latin1", "latin1_general_cs", ""))
+_charsets.add(Charset(50, "cp1251", "cp1251_bin", ""))
+_charsets.add(Charset(51, "cp1251", "cp1251_general_ci", "Yes"))
+_charsets.add(Charset(52, "cp1251", "cp1251_general_cs", ""))
+_charsets.add(Charset(53, "macroman", "macroman_bin", ""))
+_charsets.add(Charset(57, "cp1256", "cp1256_general_ci", "Yes"))
+_charsets.add(Charset(58, "cp1257", "cp1257_bin", ""))
+_charsets.add(Charset(59, "cp1257", "cp1257_general_ci", "Yes"))
+_charsets.add(Charset(63, "binary", "binary", "Yes"))
+_charsets.add(Charset(64, "armscii8", "armscii8_bin", ""))
+_charsets.add(Charset(65, "ascii", "ascii_bin", ""))
+_charsets.add(Charset(66, "cp1250", "cp1250_bin", ""))
+_charsets.add(Charset(67, "cp1256", "cp1256_bin", ""))
+_charsets.add(Charset(68, "cp866", "cp866_bin", ""))
+_charsets.add(Charset(69, "dec8", "dec8_bin", ""))
+_charsets.add(Charset(70, "greek", "greek_bin", ""))
+_charsets.add(Charset(71, "hebrew", "hebrew_bin", ""))
+_charsets.add(Charset(72, "hp8", "hp8_bin", ""))
+_charsets.add(Charset(73, "keybcs2", "keybcs2_bin", ""))
+_charsets.add(Charset(74, "koi8r", "koi8r_bin", ""))
+_charsets.add(Charset(75, "koi8u", "koi8u_bin", ""))
+_charsets.add(Charset(76, "utf8", "utf8_tolower_ci", ""))
+_charsets.add(Charset(77, "latin2", "latin2_bin", ""))
+_charsets.add(Charset(78, "latin5", "latin5_bin", ""))
+_charsets.add(Charset(79, "latin7", "latin7_bin", ""))
+_charsets.add(Charset(80, "cp850", "cp850_bin", ""))
+_charsets.add(Charset(81, "cp852", "cp852_bin", ""))
+_charsets.add(Charset(82, "swe7", "swe7_bin", ""))
+_charsets.add(Charset(83, "utf8", "utf8_bin", ""))
+_charsets.add(Charset(84, "big5", "big5_bin", ""))
+_charsets.add(Charset(85, "euckr", "euckr_bin", ""))
+_charsets.add(Charset(86, "gb2312", "gb2312_bin", ""))
+_charsets.add(Charset(87, "gbk", "gbk_bin", ""))
+_charsets.add(Charset(88, "sjis", "sjis_bin", ""))
+_charsets.add(Charset(89, "tis620", "tis620_bin", ""))
+_charsets.add(Charset(91, "ujis", "ujis_bin", ""))
+_charsets.add(Charset(92, "geostd8", "geostd8_general_ci", "Yes"))
+_charsets.add(Charset(93, "geostd8", "geostd8_bin", ""))
+_charsets.add(Charset(94, "latin1", "latin1_spanish_ci", ""))
+_charsets.add(Charset(95, "cp932", "cp932_japanese_ci", "Yes"))
+_charsets.add(Charset(96, "cp932", "cp932_bin", ""))
+_charsets.add(Charset(97, "eucjpms", "eucjpms_japanese_ci", "Yes"))
+_charsets.add(Charset(98, "eucjpms", "eucjpms_bin", ""))
+_charsets.add(Charset(99, "cp1250", "cp1250_polish_ci", ""))
+_charsets.add(Charset(192, "utf8", "utf8_unicode_ci", ""))
+_charsets.add(Charset(193, "utf8", "utf8_icelandic_ci", ""))
+_charsets.add(Charset(194, "utf8", "utf8_latvian_ci", ""))
+_charsets.add(Charset(195, "utf8", "utf8_romanian_ci", ""))
+_charsets.add(Charset(196, "utf8", "utf8_slovenian_ci", ""))
+_charsets.add(Charset(197, "utf8", "utf8_polish_ci", ""))
+_charsets.add(Charset(198, "utf8", "utf8_estonian_ci", ""))
+_charsets.add(Charset(199, "utf8", "utf8_spanish_ci", ""))
+_charsets.add(Charset(200, "utf8", "utf8_swedish_ci", ""))
+_charsets.add(Charset(201, "utf8", "utf8_turkish_ci", ""))
+_charsets.add(Charset(202, "utf8", "utf8_czech_ci", ""))
+_charsets.add(Charset(203, "utf8", "utf8_danish_ci", ""))
+_charsets.add(Charset(204, "utf8", "utf8_lithuanian_ci", ""))
+_charsets.add(Charset(205, "utf8", "utf8_slovak_ci", ""))
+_charsets.add(Charset(206, "utf8", "utf8_spanish2_ci", ""))
+_charsets.add(Charset(207, "utf8", "utf8_roman_ci", ""))
+_charsets.add(Charset(208, "utf8", "utf8_persian_ci", ""))
+_charsets.add(Charset(209, "utf8", "utf8_esperanto_ci", ""))
+_charsets.add(Charset(210, "utf8", "utf8_hungarian_ci", ""))
+_charsets.add(Charset(211, "utf8", "utf8_sinhala_ci", ""))
+_charsets.add(Charset(212, "utf8", "utf8_german2_ci", ""))
+_charsets.add(Charset(213, "utf8", "utf8_croatian_ci", ""))
+_charsets.add(Charset(214, "utf8", "utf8_unicode_520_ci", ""))
+_charsets.add(Charset(215, "utf8", "utf8_vietnamese_ci", ""))
+_charsets.add(Charset(223, "utf8", "utf8_general_mysql500_ci", ""))
+_charsets.add(Charset(224, "utf8mb4", "utf8mb4_unicode_ci", ""))
+_charsets.add(Charset(225, "utf8mb4", "utf8mb4_icelandic_ci", ""))
+_charsets.add(Charset(226, "utf8mb4", "utf8mb4_latvian_ci", ""))
+_charsets.add(Charset(227, "utf8mb4", "utf8mb4_romanian_ci", ""))
+_charsets.add(Charset(228, "utf8mb4", "utf8mb4_slovenian_ci", ""))
+_charsets.add(Charset(229, "utf8mb4", "utf8mb4_polish_ci", ""))
+_charsets.add(Charset(230, "utf8mb4", "utf8mb4_estonian_ci", ""))
+_charsets.add(Charset(231, "utf8mb4", "utf8mb4_spanish_ci", ""))
+_charsets.add(Charset(232, "utf8mb4", "utf8mb4_swedish_ci", ""))
+_charsets.add(Charset(233, "utf8mb4", "utf8mb4_turkish_ci", ""))
+_charsets.add(Charset(234, "utf8mb4", "utf8mb4_czech_ci", ""))
+_charsets.add(Charset(235, "utf8mb4", "utf8mb4_danish_ci", ""))
+_charsets.add(Charset(236, "utf8mb4", "utf8mb4_lithuanian_ci", ""))
+_charsets.add(Charset(237, "utf8mb4", "utf8mb4_slovak_ci", ""))
+_charsets.add(Charset(238, "utf8mb4", "utf8mb4_spanish2_ci", ""))
+_charsets.add(Charset(239, "utf8mb4", "utf8mb4_roman_ci", ""))
+_charsets.add(Charset(240, "utf8mb4", "utf8mb4_persian_ci", ""))
+_charsets.add(Charset(241, "utf8mb4", "utf8mb4_esperanto_ci", ""))
+_charsets.add(Charset(242, "utf8mb4", "utf8mb4_hungarian_ci", ""))
+_charsets.add(Charset(243, "utf8mb4", "utf8mb4_sinhala_ci", ""))
+_charsets.add(Charset(244, "utf8mb4", "utf8mb4_german2_ci", ""))
+_charsets.add(Charset(245, "utf8mb4", "utf8mb4_croatian_ci", ""))
+_charsets.add(Charset(246, "utf8mb4", "utf8mb4_unicode_520_ci", ""))
+_charsets.add(Charset(247, "utf8mb4", "utf8mb4_vietnamese_ci", ""))
+_charsets.add(Charset(248, "gb18030", "gb18030_chinese_ci", "Yes"))
+_charsets.add(Charset(249, "gb18030", "gb18030_bin", ""))
+_charsets.add(Charset(250, "gb18030", "gb18030_unicode_520_ci", ""))
+_charsets.add(Charset(255, "utf8mb4", "utf8mb4_0900_ai_ci", ""))
+
+charset_by_name = _charsets.by_name
+charset_by_id = _charsets.by_id

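The lookup helpers exported at the bottom of `charset.py` map MySQL charset names
and ids onto Python codec names and default collations. A quick sketch of what
they return, based on the table above:

.. code:: python

    from pymysql.charset import charset_by_id, charset_by_name

    print(charset_by_name("utf8mb4").encoding)   # 'utf8' -- utf8mb4 uses Python's utf8 codec
    print(charset_by_name("latin1").collation)   # 'latin1_swedish_ci' -- the default collation
    print(charset_by_id(63).is_binary)           # True -- id 63 is the binary charset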
+ 1367 - 0
ambt-preden-monthlymigration-dev/package/pymysql/connections.py

@@ -0,0 +1,1367 @@
+# Python implementation of the MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+# Error codes:
+# https://dev.mysql.com/doc/refman/5.5/en/error-handling.html
+import errno
+import os
+import socket
+import struct
+import sys
+import traceback
+import warnings
+
+from . import _auth
+
+from .charset import charset_by_name, charset_by_id
+from .constants import CLIENT, COMMAND, CR, FIELD_TYPE, SERVER_STATUS
+from . import converters
+from .cursors import Cursor
+from .optionfile import Parser
+from .protocol import (
+    dump_packet,
+    MysqlPacket,
+    FieldDescriptorPacket,
+    OKPacketWrapper,
+    EOFPacketWrapper,
+    LoadLocalPacketWrapper,
+)
+from . import err, VERSION_STRING
+
+try:
+    import ssl
+
+    SSL_ENABLED = True
+except ImportError:
+    ssl = None
+    SSL_ENABLED = False
+
+try:
+    import getpass
+
+    DEFAULT_USER = getpass.getuser()
+    del getpass
+except (ImportError, KeyError):
+    # KeyError occurs when there's no entry in the OS database for the current user.
+    DEFAULT_USER = None
+
+DEBUG = False
+
+TEXT_TYPES = {
+    FIELD_TYPE.BIT,
+    FIELD_TYPE.BLOB,
+    FIELD_TYPE.LONG_BLOB,
+    FIELD_TYPE.MEDIUM_BLOB,
+    FIELD_TYPE.STRING,
+    FIELD_TYPE.TINY_BLOB,
+    FIELD_TYPE.VAR_STRING,
+    FIELD_TYPE.VARCHAR,
+    FIELD_TYPE.GEOMETRY,
+}
+
+
+DEFAULT_CHARSET = "utf8mb4"
+
+MAX_PACKET_LEN = 2 ** 24 - 1
+
+
+def _pack_int24(n):
+    return struct.pack("<I", n)[:3]
+
+
+# https://dev.mysql.com/doc/internals/en/integer.html#packet-Protocol::LengthEncodedInteger
+def _lenenc_int(i):
+    if i < 0:
+        raise ValueError(
+            "Encoding %d is less than 0 - no representation in LengthEncodedInteger" % i
+        )
+    elif i < 0xFB:
+        return bytes([i])
+    elif i < (1 << 16):
+        return b"\xfc" + struct.pack("<H", i)
+    elif i < (1 << 24):
+        return b"\xfd" + struct.pack("<I", i)[:3]
+    elif i < (1 << 64):
+        return b"\xfe" + struct.pack("<Q", i)
+    else:
+        raise ValueError(
+            "Encoding %x is larger than %x - no representation in LengthEncodedInteger"
+            % (i, (1 << 64))
+        )
+
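+# For illustration, a few encodings produced by _lenenc_int() above:
+#   _lenenc_int(250)   -> b"\xfa"              (value < 0xFB: single byte)
+#   _lenenc_int(1000)  -> b"\xfc\xe8\x03"      (0xFC prefix + 2-byte little-endian)
+#   _lenenc_int(70000) -> b"\xfd\x70\x11\x01"  (0xFD prefix + 3-byte little-endian)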
+
+class Connection:
+    """
+    Representation of a socket with a mysql server.
+
+    The proper way to get an instance of this class is to call
+    connect().
+
+    Establish a connection to the MySQL database. Accepts several
+    arguments:
+
+    :param host: Host where the database server is located
+    :param user: Username to log in as
+    :param password: Password to use.
+    :param database: Database to use, None to not use a particular one.
+    :param port: MySQL port to use, default is usually OK. (default: 3306)
+    :param bind_address: When the client has multiple network interfaces, specify
+        the interface from which to connect to the host. Argument can be
+        a hostname or an IP address.
+    :param unix_socket: Optionally, you can use a unix socket rather than TCP/IP.
+    :param read_timeout: The timeout for reading from the connection in seconds (default: None - no timeout)
+    :param write_timeout: The timeout for writing to the connection in seconds (default: None - no timeout)
+    :param charset: Charset you want to use.
+    :param sql_mode: Default SQL_MODE to use.
+    :param read_default_file:
+        Specifies  my.cnf file to read these parameters from under the [client] section.
+    :param conv:
+        Conversion dictionary to use instead of the default one.
+        This is used to provide custom marshalling and unmarshalling of types.
+        See converters.
+    :param use_unicode:
+        Whether or not to default to unicode strings.
+        This option defaults to true.
+    :param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT.
+    :param cursorclass: Custom cursor class to use.
+    :param init_command: Initial SQL statement to run when connection is established.
+    :param connect_timeout: Timeout before throwing an exception when connecting.
+        (default: 10, min: 1, max: 31536000)
+    :param ssl:
+        A dict of arguments similar to mysql_ssl_set()'s parameters.
+    :param ssl_ca: Path to the file that contains a PEM-formatted CA certificate
+    :param ssl_cert: Path to the file that contains a PEM-formatted client certificate
+    :param ssl_disabled: A boolean value that disables usage of TLS
+    :param ssl_key: Path to the file that contains a PEM-formatted private key for the client certificate
+    :param ssl_verify_cert: Set to true to check the validity of server certificates
+    :param ssl_verify_identity: Set to true to check the server's identity
+    :param read_default_group: Group to read from in the configuration file.
+    :param autocommit: Autocommit mode. None means use server default. (default: False)
+    :param local_infile: Boolean to enable the use of LOAD DATA LOCAL command. (default: False)
+    :param max_allowed_packet: Max size of packet sent to server in bytes. (default: 16MB)
+        Only used to limit size of "LOAD LOCAL INFILE" data packet smaller than default (16KB).
+    :param defer_connect: Don't explicitly connect on construction - wait for connect call.
+        (default: False)
+    :param auth_plugin_map: A dict of plugin names to a class that processes that plugin.
+        The class will take the Connection object as the argument to the constructor.
+        The class needs an authenticate method taking an authentication packet as
+        an argument.  For the dialog plugin, a prompt(echo, prompt) method can be used
+        (if no authenticate method) for returning a string from the user. (experimental)
+    :param server_public_key: SHA256 authentication plugin public key value. (default: None)
+    :param binary_prefix: Add _binary prefix on bytes and bytearray. (default: False)
+    :param compress: Not supported
+    :param named_pipe: Not supported
+    :param db: **DEPRECATED** Alias for database.
+    :param passwd: **DEPRECATED** Alias for password.
+
+    See `Connection <https://www.python.org/dev/peps/pep-0249/#connection-objects>`_ in the
+    specification.
+    """
+
+    _sock = None
+    _auth_plugin_name = ""
+    _closed = False
+    _secure = False
+
+    def __init__(
+        self,
+        *,
+        user=None,  # The first four arguments are based on the DB-API 2.0 recommendation.
+        password="",
+        host=None,
+        database=None,
+        unix_socket=None,
+        port=0,
+        charset="",
+        sql_mode=None,
+        read_default_file=None,
+        conv=None,
+        use_unicode=True,
+        client_flag=0,
+        cursorclass=Cursor,
+        init_command=None,
+        connect_timeout=10,
+        read_default_group=None,
+        autocommit=False,
+        local_infile=False,
+        max_allowed_packet=16 * 1024 * 1024,
+        defer_connect=False,
+        auth_plugin_map=None,
+        read_timeout=None,
+        write_timeout=None,
+        bind_address=None,
+        binary_prefix=False,
+        program_name=None,
+        server_public_key=None,
+        ssl=None,
+        ssl_ca=None,
+        ssl_cert=None,
+        ssl_disabled=None,
+        ssl_key=None,
+        ssl_verify_cert=None,
+        ssl_verify_identity=None,
+        compress=None,  # not supported
+        named_pipe=None,  # not supported
+        passwd=None,  # deprecated
+        db=None,  # deprecated
+    ):
+        if db is not None and database is None:
+            # We will raise a warning in 2022 or later.
+            # See https://github.com/PyMySQL/PyMySQL/issues/939
+            # warnings.warn("'db' is deprecated, use 'database'", DeprecationWarning, 3)
+            database = db
+        if passwd is not None and not password:
+            # We will raise a warning in 2022 or later.
+            # See https://github.com/PyMySQL/PyMySQL/issues/939
+            # warnings.warn(
+            #    "'passwd' is deprecated, use 'password'", DeprecationWarning, 3
+            # )
+            password = passwd
+
+        if compress or named_pipe:
+            raise NotImplementedError(
+                "compress and named_pipe arguments are not supported"
+            )
+
+        self._local_infile = bool(local_infile)
+        if self._local_infile:
+            client_flag |= CLIENT.LOCAL_FILES
+
+        if read_default_group and not read_default_file:
+            if sys.platform.startswith("win"):
+                read_default_file = "c:\\my.ini"
+            else:
+                read_default_file = "/etc/my.cnf"
+
+        if read_default_file:
+            if not read_default_group:
+                read_default_group = "client"
+
+            cfg = Parser()
+            cfg.read(os.path.expanduser(read_default_file))
+
+            def _config(key, arg):
+                if arg:
+                    return arg
+                try:
+                    return cfg.get(read_default_group, key)
+                except Exception:
+                    return arg
+
+            user = _config("user", user)
+            password = _config("password", password)
+            host = _config("host", host)
+            database = _config("database", database)
+            unix_socket = _config("socket", unix_socket)
+            port = int(_config("port", port))
+            bind_address = _config("bind-address", bind_address)
+            charset = _config("default-character-set", charset)
+            if not ssl:
+                ssl = {}
+            if isinstance(ssl, dict):
+                for key in ["ca", "capath", "cert", "key", "cipher"]:
+                    value = _config("ssl-" + key, ssl.get(key))
+                    if value:
+                        ssl[key] = value
+
+        self.ssl = False
+        if not ssl_disabled:
+            if ssl_ca or ssl_cert or ssl_key or ssl_verify_cert or ssl_verify_identity:
+                ssl = {
+                    "ca": ssl_ca,
+                    "check_hostname": bool(ssl_verify_identity),
+                    "verify_mode": ssl_verify_cert
+                    if ssl_verify_cert is not None
+                    else False,
+                }
+                if ssl_cert is not None:
+                    ssl["cert"] = ssl_cert
+                if ssl_key is not None:
+                    ssl["key"] = ssl_key
+            if ssl:
+                if not SSL_ENABLED:
+                    raise NotImplementedError("ssl module not found")
+                self.ssl = True
+                client_flag |= CLIENT.SSL
+                self.ctx = self._create_ssl_ctx(ssl)
+
+        self.host = host or "localhost"
+        self.port = port or 3306
+        if type(self.port) is not int:
+            raise ValueError("port should be of type int")
+        self.user = user or DEFAULT_USER
+        self.password = password or b""
+        if isinstance(self.password, str):
+            self.password = self.password.encode("latin1")
+        self.db = database
+        self.unix_socket = unix_socket
+        self.bind_address = bind_address
+        if not (0 < connect_timeout <= 31536000):
+            raise ValueError("connect_timeout should be >0 and <=31536000")
+        self.connect_timeout = connect_timeout or None
+        if read_timeout is not None and read_timeout <= 0:
+            raise ValueError("read_timeout should be > 0")
+        self._read_timeout = read_timeout
+        if write_timeout is not None and write_timeout <= 0:
+            raise ValueError("write_timeout should be > 0")
+        self._write_timeout = write_timeout
+
+        self.charset = charset or DEFAULT_CHARSET
+        self.use_unicode = use_unicode
+
+        self.encoding = charset_by_name(self.charset).encoding
+
+        client_flag |= CLIENT.CAPABILITIES
+        if self.db:
+            client_flag |= CLIENT.CONNECT_WITH_DB
+
+        self.client_flag = client_flag
+
+        self.cursorclass = cursorclass
+
+        self._result = None
+        self._affected_rows = 0
+        self.host_info = "Not connected"
+
+        # specified autocommit mode. None means use server default.
+        self.autocommit_mode = autocommit
+
+        if conv is None:
+            conv = converters.conversions
+
+        # Need for MySQLdb compatibility.
+        self.encoders = {k: v for (k, v) in conv.items() if type(k) is not int}
+        self.decoders = {k: v for (k, v) in conv.items() if type(k) is int}
+        self.sql_mode = sql_mode
+        self.init_command = init_command
+        self.max_allowed_packet = max_allowed_packet
+        self._auth_plugin_map = auth_plugin_map or {}
+        self._binary_prefix = binary_prefix
+        self.server_public_key = server_public_key
+
+        self._connect_attrs = {
+            "_client_name": "pymysql",
+            "_pid": str(os.getpid()),
+            "_client_version": VERSION_STRING,
+        }
+
+        if program_name:
+            self._connect_attrs["program_name"] = program_name
+
+        if defer_connect:
+            self._sock = None
+        else:
+            self.connect()
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        del exc_info
+        self.close()
+
+    def _create_ssl_ctx(self, sslp):
+        if isinstance(sslp, ssl.SSLContext):
+            return sslp
+        ca = sslp.get("ca")
+        capath = sslp.get("capath")
+        hasnoca = ca is None and capath is None
+        ctx = ssl.create_default_context(cafile=ca, capath=capath)
+        ctx.check_hostname = not hasnoca and sslp.get("check_hostname", True)
+        verify_mode_value = sslp.get("verify_mode")
+        if verify_mode_value is None:
+            ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+        elif isinstance(verify_mode_value, bool):
+            ctx.verify_mode = ssl.CERT_REQUIRED if verify_mode_value else ssl.CERT_NONE
+        else:
+            if isinstance(verify_mode_value, str):
+                verify_mode_value = verify_mode_value.lower()
+            if verify_mode_value in ("none", "0", "false", "no"):
+                ctx.verify_mode = ssl.CERT_NONE
+            elif verify_mode_value == "optional":
+                ctx.verify_mode = ssl.CERT_OPTIONAL
+            elif verify_mode_value in ("required", "1", "true", "yes"):
+                ctx.verify_mode = ssl.CERT_REQUIRED
+            else:
+                ctx.verify_mode = ssl.CERT_NONE if hasnoca else ssl.CERT_REQUIRED
+        if "cert" in sslp:
+            ctx.load_cert_chain(sslp["cert"], keyfile=sslp.get("key"))
+        if "cipher" in sslp:
+            ctx.set_ciphers(sslp["cipher"])
+        ctx.options |= ssl.OP_NO_SSLv2
+        ctx.options |= ssl.OP_NO_SSLv3
+        return ctx
+
+    def close(self):
+        """
+        Send the quit message and close the socket.
+
+        See `Connection.close() <https://www.python.org/dev/peps/pep-0249/#Connection.close>`_
+        in the specification.
+
+        :raise Error: If the connection is already closed.
+        """
+        if self._closed:
+            raise err.Error("Already closed")
+        self._closed = True
+        if self._sock is None:
+            return
+        send_data = struct.pack("<iB", 1, COMMAND.COM_QUIT)
+        try:
+            self._write_bytes(send_data)
+        except Exception:
+            pass
+        finally:
+            self._force_close()
+
+    @property
+    def open(self):
+        """Return True if the connection is open"""
+        return self._sock is not None
+
+    def _force_close(self):
+        """Close connection without QUIT message"""
+        if self._sock:
+            try:
+                self._sock.close()
+            except:  # noqa
+                pass
+        self._sock = None
+        self._rfile = None
+
+    __del__ = _force_close
+
+    def autocommit(self, value):
+        self.autocommit_mode = bool(value)
+        current = self.get_autocommit()
+        if value != current:
+            self._send_autocommit_mode()
+
+    def get_autocommit(self):
+        return bool(self.server_status & SERVER_STATUS.SERVER_STATUS_AUTOCOMMIT)
+
+    def _read_ok_packet(self):
+        pkt = self._read_packet()
+        if not pkt.is_ok_packet():
+            raise err.OperationalError(2014, "Command Out of Sync")
+        ok = OKPacketWrapper(pkt)
+        self.server_status = ok.server_status
+        return ok
+
+    def _send_autocommit_mode(self):
+        """Set whether or not to commit after every execute()"""
+        self._execute_command(
+            COMMAND.COM_QUERY, "SET AUTOCOMMIT = %s" % self.escape(self.autocommit_mode)
+        )
+        self._read_ok_packet()
+
+    def begin(self):
+        """Begin transaction."""
+        self._execute_command(COMMAND.COM_QUERY, "BEGIN")
+        self._read_ok_packet()
+
+    def commit(self):
+        """
+        Commit changes to stable storage.
+
+        See `Connection.commit() <https://www.python.org/dev/peps/pep-0249/#commit>`_
+        in the specification.
+        """
+        self._execute_command(COMMAND.COM_QUERY, "COMMIT")
+        self._read_ok_packet()
+
+    def rollback(self):
+        """
+        Roll back the current transaction.
+
+        See `Connection.rollback() <https://www.python.org/dev/peps/pep-0249/#rollback>`_
+        in the specification.
+        """
+        self._execute_command(COMMAND.COM_QUERY, "ROLLBACK")
+        self._read_ok_packet()
+
+    def show_warnings(self):
+        """Send the "SHOW WARNINGS" SQL command."""
+        self._execute_command(COMMAND.COM_QUERY, "SHOW WARNINGS")
+        result = MySQLResult(self)
+        result.read()
+        return result.rows
+
+    def select_db(self, db):
+        """
+        Set current db.
+
+        :param db: The name of the db.
+        """
+        self._execute_command(COMMAND.COM_INIT_DB, db)
+        self._read_ok_packet()
+
+    def escape(self, obj, mapping=None):
+        """Escape whatever value you pass to it.
+
+        Non-standard, for internal use; do not use this in your applications.
+        """
+        if isinstance(obj, str):
+            return "'" + self.escape_string(obj) + "'"
+        if isinstance(obj, (bytes, bytearray)):
+            ret = self._quote_bytes(obj)
+            if self._binary_prefix:
+                ret = "_binary" + ret
+            return ret
+        return converters.escape_item(obj, self.charset, mapping=mapping)
+
+    def literal(self, obj):
+        """Alias for escape()
+
+        Non-standard, for internal use; do not use this in your applications.
+        """
+        return self.escape(obj, self.encoders)
+
+    def escape_string(self, s):
+        if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+            return s.replace("'", "''")
+        return converters.escape_string(s)
+
+    def _quote_bytes(self, s):
+        if self.server_status & SERVER_STATUS.SERVER_STATUS_NO_BACKSLASH_ESCAPES:
+            return "'%s'" % (s.replace(b"'", b"''").decode("ascii", "surrogateescape"),)
+        return converters.escape_bytes(s)
+
+    def cursor(self, cursor=None):
+        """
+        Create a new cursor to execute queries with.
+
+        :param cursor: The type of cursor to create; one of :py:class:`Cursor`,
+            :py:class:`SSCursor`, :py:class:`DictCursor`, or :py:class:`SSDictCursor`.
+            None means use Cursor.
+        """
+        if cursor:
+            return cursor(self)
+        return self.cursorclass(self)
+
+    # The following methods are INTERNAL USE ONLY (called from Cursor)
+    def query(self, sql, unbuffered=False):
+        # if DEBUG:
+        #     print("DEBUG: sending query:", sql)
+        if isinstance(sql, str):
+            sql = sql.encode(self.encoding, "surrogateescape")
+        self._execute_command(COMMAND.COM_QUERY, sql)
+        self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+        return self._affected_rows
+
+    def next_result(self, unbuffered=False):
+        self._affected_rows = self._read_query_result(unbuffered=unbuffered)
+        return self._affected_rows
+
+    def affected_rows(self):
+        return self._affected_rows
+
+    def kill(self, thread_id):
+        arg = struct.pack("<I", thread_id)
+        self._execute_command(COMMAND.COM_PROCESS_KILL, arg)
+        return self._read_ok_packet()
+
+    def ping(self, reconnect=True):
+        """
+        Check if the server is alive.
+
+        :param reconnect: If the connection is closed, reconnect.
+        :raise Error: If the connection is closed and reconnect=False.
+        """
+        if self._sock is None:
+            if reconnect:
+                self.connect()
+                reconnect = False
+            else:
+                raise err.Error("Already closed")
+        try:
+            self._execute_command(COMMAND.COM_PING, "")
+            self._read_ok_packet()
+        except Exception:
+            if reconnect:
+                self.connect()
+                self.ping(False)
+            else:
+                raise
+
+    def set_charset(self, charset):
+        # Make sure charset is supported.
+        encoding = charset_by_name(charset).encoding
+
+        self._execute_command(COMMAND.COM_QUERY, "SET NAMES %s" % self.escape(charset))
+        self._read_packet()
+        self.charset = charset
+        self.encoding = encoding
+
+    def connect(self, sock=None):
+        self._closed = False
+        try:
+            if sock is None:
+                if self.unix_socket:
+                    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+                    sock.settimeout(self.connect_timeout)
+                    sock.connect(self.unix_socket)
+                    self.host_info = "Localhost via UNIX socket"
+                    self._secure = True
+                    if DEBUG:
+                        print("connected using unix_socket")
+                else:
+                    kwargs = {}
+                    if self.bind_address is not None:
+                        kwargs["source_address"] = (self.bind_address, 0)
+                    while True:
+                        try:
+                            sock = socket.create_connection(
+                                (self.host, self.port), self.connect_timeout, **kwargs
+                            )
+                            break
+                        except (OSError, IOError) as e:
+                            if e.errno == errno.EINTR:
+                                continue
+                            raise
+                    self.host_info = "socket %s:%d" % (self.host, self.port)
+                    if DEBUG:
+                        print("connected using socket")
+                    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+                    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
+                sock.settimeout(None)
+
+            self._sock = sock
+            self._rfile = sock.makefile("rb")
+            self._next_seq_id = 0
+
+            self._get_server_information()
+            self._request_authentication()
+
+            if self.sql_mode is not None:
+                c = self.cursor()
+                c.execute("SET sql_mode=%s", (self.sql_mode,))
+
+            if self.init_command is not None:
+                c = self.cursor()
+                c.execute(self.init_command)
+                c.close()
+                self.commit()
+
+            if self.autocommit_mode is not None:
+                self.autocommit(self.autocommit_mode)
+        except BaseException as e:
+            self._rfile = None
+            if sock is not None:
+                try:
+                    sock.close()
+                except:  # noqa
+                    pass
+
+            if isinstance(e, (OSError, IOError, socket.error)):
+                exc = err.OperationalError(
+                    2003, "Can't connect to MySQL server on %r (%s)" % (self.host, e)
+                )
+                # Keep original exception and traceback to investigate error.
+                exc.original_exception = e
+                exc.traceback = traceback.format_exc()
+                if DEBUG:
+                    print(exc.traceback)
+                raise exc
+
+            # If e is neither DatabaseError nor IOError, it's a bug.
+            # But raising AssertionError hides original error.
+            # So just reraise it.
+            raise
+
+    def write_packet(self, payload):
+        """Writes an entire "mysql packet" in its entirety to the network
+        adding its length and sequence number.
+        """
+        # Internal note: when you build a packet manually and call _write_bytes()
+        # directly, you should set self._next_seq_id properly.
+        data = _pack_int24(len(payload)) + bytes([self._next_seq_id]) + payload
+        if DEBUG:
+            dump_packet(data)
+        self._write_bytes(data)
+        self._next_seq_id = (self._next_seq_id + 1) % 256
+
+    def _read_packet(self, packet_type=MysqlPacket):
+        """Read an entire "mysql packet" in its entirety from the network
+        and return a MysqlPacket type that represents the results.
+
+        :raise OperationalError: If the connection to the MySQL server is lost.
+        :raise InternalError: If the packet sequence number is wrong.
+        """
+        buff = bytearray()
+        while True:
+            packet_header = self._read_bytes(4)
+            # if DEBUG: dump_packet(packet_header)
+
+            btrl, btrh, packet_number = struct.unpack("<HBB", packet_header)
+            bytes_to_read = btrl + (btrh << 16)
+            if packet_number != self._next_seq_id:
+                self._force_close()
+                if packet_number == 0:
+                    # MariaDB sends error packet with seqno==0 when shutdown
+                    raise err.OperationalError(
+                        CR.CR_SERVER_LOST,
+                        "Lost connection to MySQL server during query",
+                    )
+                raise err.InternalError(
+                    "Packet sequence number wrong - got %d expected %d"
+                    % (packet_number, self._next_seq_id)
+                )
+            self._next_seq_id = (self._next_seq_id + 1) % 256
+
+            recv_data = self._read_bytes(bytes_to_read)
+            if DEBUG:
+                dump_packet(recv_data)
+            buff += recv_data
+            # https://dev.mysql.com/doc/internals/en/sending-more-than-16mbyte.html
+            if bytes_to_read == 0xFFFFFF:
+                continue
+            if bytes_to_read < MAX_PACKET_LEN:
+                break
+
+        packet = packet_type(bytes(buff), self.encoding)
+        if packet.is_error_packet():
+            if self._result is not None and self._result.unbuffered_active is True:
+                self._result.unbuffered_active = False
+            packet.raise_for_error()
+        return packet
+
+    def _read_bytes(self, num_bytes):
+        self._sock.settimeout(self._read_timeout)
+        while True:
+            try:
+                data = self._rfile.read(num_bytes)
+                break
+            except (IOError, OSError) as e:
+                if e.errno == errno.EINTR:
+                    continue
+                self._force_close()
+                raise err.OperationalError(
+                    CR.CR_SERVER_LOST,
+                    "Lost connection to MySQL server during query (%s)" % (e,),
+                )
+            except BaseException:
+                # Don't convert unknown exception to MySQLError.
+                self._force_close()
+                raise
+        if len(data) < num_bytes:
+            self._force_close()
+            raise err.OperationalError(
+                CR.CR_SERVER_LOST, "Lost connection to MySQL server during query"
+            )
+        return data
+
+    def _write_bytes(self, data):
+        self._sock.settimeout(self._write_timeout)
+        try:
+            self._sock.sendall(data)
+        except IOError as e:
+            self._force_close()
+            raise err.OperationalError(
+                CR.CR_SERVER_GONE_ERROR, "MySQL server has gone away (%r)" % (e,)
+            )
+
+    def _read_query_result(self, unbuffered=False):
+        self._result = None
+        if unbuffered:
+            try:
+                result = MySQLResult(self)
+                result.init_unbuffered_query()
+            except:
+                result.unbuffered_active = False
+                result.connection = None
+                raise
+        else:
+            result = MySQLResult(self)
+            result.read()
+        self._result = result
+        if result.server_status is not None:
+            self.server_status = result.server_status
+        return result.affected_rows
+
+    def insert_id(self):
+        if self._result:
+            return self._result.insert_id
+        else:
+            return 0
+
+    def _execute_command(self, command, sql):
+        """
+        :raise InterfaceError: If the connection is closed.
+        :raise ValueError: If no username was specified.
+        """
+        if not self._sock:
+            raise err.InterfaceError(0, "")
+
+        # If the last query was unbuffered, make sure it finishes before
+        # sending new commands
+        if self._result is not None:
+            if self._result.unbuffered_active:
+                warnings.warn("Previous unbuffered result was left incomplete")
+                self._result._finish_unbuffered_query()
+            while self._result.has_next:
+                self.next_result()
+            self._result = None
+
+        if isinstance(sql, str):
+            sql = sql.encode(self.encoding)
+
+        packet_size = min(MAX_PACKET_LEN, len(sql) + 1)  # +1 is for command
+
+        # tiny optimization: build the first packet manually instead of
+        # calling self.write_packet()
+        prelude = struct.pack("<iB", packet_size, command)
+        packet = prelude + sql[: packet_size - 1]
+        self._write_bytes(packet)
+        if DEBUG:
+            dump_packet(packet)
+        self._next_seq_id = 1
+
+        if packet_size < MAX_PACKET_LEN:
+            return
+
+        sql = sql[packet_size - 1 :]
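+        # Send the rest of the query in MAX_PACKET_LEN sized chunks; when the
+        # final chunk is exactly MAX_PACKET_LEN bytes the loop also sends a
+        # trailing empty packet to mark the end of the command.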
+        while True:
+            packet_size = min(MAX_PACKET_LEN, len(sql))
+            self.write_packet(sql[:packet_size])
+            sql = sql[packet_size:]
+            if not sql and packet_size < MAX_PACKET_LEN:
+                break
+
+    def _request_authentication(self):
+        # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
+        if int(self.server_version.split(".", 1)[0]) >= 5:
+            self.client_flag |= CLIENT.MULTI_RESULTS
+
+        if self.user is None:
+            raise ValueError("Did not specify a username")
+
+        charset_id = charset_by_name(self.charset).id
+        if isinstance(self.user, str):
+            self.user = self.user.encode(self.encoding)
+
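+        # HandshakeResponse header: capability flags, max packet size,
+        # charset id and 23 bytes of filler.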
+        data_init = struct.pack(
+            "<iIB23s", self.client_flag, MAX_PACKET_LEN, charset_id, b""
+        )
+
+        if self.ssl and self.server_capabilities & CLIENT.SSL:
+            self.write_packet(data_init)
+
+            self._sock = self.ctx.wrap_socket(self._sock, server_hostname=self.host)
+            self._rfile = self._sock.makefile("rb")
+            self._secure = True
+
+        data = data_init + self.user + b"\0"
+
+        authresp = b""
+        plugin_name = None
+
+        if self._auth_plugin_name == "":
+            plugin_name = b""
+            authresp = _auth.scramble_native_password(self.password, self.salt)
+        elif self._auth_plugin_name == "mysql_native_password":
+            plugin_name = b"mysql_native_password"
+            authresp = _auth.scramble_native_password(self.password, self.salt)
+        elif self._auth_plugin_name == "caching_sha2_password":
+            plugin_name = b"caching_sha2_password"
+            if self.password:
+                if DEBUG:
+                    print("caching_sha2: trying fast path")
+                authresp = _auth.scramble_caching_sha2(self.password, self.salt)
+            else:
+                if DEBUG:
+                    print("caching_sha2: empty password")
+        elif self._auth_plugin_name == "sha256_password":
+            plugin_name = b"sha256_password"
+            if self.ssl and self.server_capabilities & CLIENT.SSL:
+                authresp = self.password + b"\0"
+            elif self.password:
+                authresp = b"\1"  # request public key
+            else:
+                authresp = b"\0"  # empty password
+
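+        # The auth response is sent length-encoded, length-prefixed or
+        # NUL-terminated depending on the capabilities the server advertised.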
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
+            data += _lenenc_int(len(authresp)) + authresp
+        elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
+            data += struct.pack("B", len(authresp)) + authresp
+        else:  # pragma: no cover - not testing against servers without secure auth (>=5.0)
+            data += authresp + b"\0"
+
+        if self.db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
+            if isinstance(self.db, str):
+                self.db = self.db.encode(self.encoding)
+            data += self.db + b"\0"
+
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH:
+            data += (plugin_name or b"") + b"\0"
+
+        if self.server_capabilities & CLIENT.CONNECT_ATTRS:
+            connect_attrs = b""
+            for k, v in self._connect_attrs.items():
+                k = k.encode("utf-8")
+                connect_attrs += struct.pack("B", len(k)) + k
+                v = v.encode("utf-8")
+                connect_attrs += struct.pack("B", len(v)) + v
+            data += struct.pack("B", len(connect_attrs)) + connect_attrs
+
+        self.write_packet(data)
+        auth_packet = self._read_packet()
+
+        # if the authentication method isn't accepted, the first byte of
+        # the response packet will be 0xFE (254)
+        if auth_packet.is_auth_switch_request():
+            if DEBUG:
+                print("received auth switch")
+            # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+            auth_packet.read_uint8()  # 0xfe packet identifier
+            plugin_name = auth_packet.read_string()
+            if (
+                self.server_capabilities & CLIENT.PLUGIN_AUTH
+                and plugin_name is not None
+            ):
+                auth_packet = self._process_auth(plugin_name, auth_packet)
+            else:
+                # send legacy handshake
+                data = _auth.scramble_old_password(self.password, self.salt) + b"\0"
+                self.write_packet(data)
+                auth_packet = self._read_packet()
+        elif auth_packet.is_extra_auth_data():
+            if DEBUG:
+                print("received extra data")
+            # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+            if self._auth_plugin_name == "caching_sha2_password":
+                auth_packet = _auth.caching_sha2_password_auth(self, auth_packet)
+            elif self._auth_plugin_name == "sha256_password":
+                auth_packet = _auth.sha256_password_auth(self, auth_packet)
+            else:
+                raise err.OperationalError(
+                    "Received extra packet for auth method %r", self._auth_plugin_name
+                )
+
+        if DEBUG:
+            print("Authentication succeeded")
+
+    def _process_auth(self, plugin_name, auth_packet):
+        handler = self._get_auth_plugin_handler(plugin_name)
+        if handler:
+            try:
+                return handler.authenticate(auth_packet)
+            except AttributeError:
+                if plugin_name != b"dialog":
+                    raise err.OperationalError(
+                        2059,
+                        "Authentication plugin '%s'"
+                        " not loaded: - %r missing authenticate method"
+                        % (plugin_name, type(handler)),
+                    )
+        if plugin_name == b"caching_sha2_password":
+            return _auth.caching_sha2_password_auth(self, auth_packet)
+        elif plugin_name == b"sha256_password":
+            return _auth.sha256_password_auth(self, auth_packet)
+        elif plugin_name == b"mysql_native_password":
+            data = _auth.scramble_native_password(self.password, auth_packet.read_all())
+        elif plugin_name == b"client_ed25519":
+            data = _auth.ed25519_password(self.password, auth_packet.read_all())
+        elif plugin_name == b"mysql_old_password":
+            data = (
+                _auth.scramble_old_password(self.password, auth_packet.read_all())
+                + b"\0"
+            )
+        elif plugin_name == b"mysql_clear_password":
+            # https://dev.mysql.com/doc/internals/en/clear-text-authentication.html
+            data = self.password + b"\0"
+        elif plugin_name == b"dialog":
+            pkt = auth_packet
+            while True:
+                flag = pkt.read_uint8()
+                echo = (flag & 0x06) == 0x02
+                last = (flag & 0x01) == 0x01
+                prompt = pkt.read_all()
+
+                if prompt == b"Password: ":
+                    self.write_packet(self.password + b"\0")
+                elif handler:
+                    resp = "no response - TypeError within plugin.prompt method"
+                    try:
+                        resp = handler.prompt(echo, prompt)
+                        self.write_packet(resp + b"\0")
+                    except AttributeError:
+                        raise err.OperationalError(
+                            2059,
+                            "Authentication plugin '%s'"
+                            " not loaded: - %r missing prompt method"
+                            % (plugin_name, handler),
+                        )
+                    except TypeError:
+                        raise err.OperationalError(
+                            2061,
+                            "Authentication plugin '%s'"
+                            " %r didn't respond with string. Returned '%r' to prompt %r"
+                            % (plugin_name, handler, resp, prompt),
+                        )
+                else:
+                    raise err.OperationalError(
+                        2059,
+                        "Authentication plugin '%s' (%r) not configured"
+                        % (plugin_name, handler),
+                    )
+                pkt = self._read_packet()
+                pkt.check_error()
+                if pkt.is_ok_packet() or last:
+                    break
+            return pkt
+        else:
+            raise err.OperationalError(
+                2059, "Authentication plugin '%s' not configured" % plugin_name
+            )
+
+        self.write_packet(data)
+        pkt = self._read_packet()
+        pkt.check_error()
+        return pkt
+
+    def _get_auth_plugin_handler(self, plugin_name):
+        plugin_class = self._auth_plugin_map.get(plugin_name)
+        if not plugin_class and isinstance(plugin_name, bytes):
+            plugin_class = self._auth_plugin_map.get(plugin_name.decode("ascii"))
+        if plugin_class:
+            try:
+                handler = plugin_class(self)
+            except TypeError:
+                raise err.OperationalError(
+                    2059,
+                    "Authentication plugin '%s'"
+                    " not loaded: - %r cannot be constructed with connection object"
+                    % (plugin_name, plugin_class),
+                )
+        else:
+            handler = None
+        return handler
+
+    # _mysql support
+    def thread_id(self):
+        return self.server_thread_id[0]
+
+    def character_set_name(self):
+        return self.charset
+
+    def get_host_info(self):
+        return self.host_info
+
+    def get_proto_info(self):
+        return self.protocol_version
+
+    def _get_server_information(self):
+        i = 0
+        packet = self._read_packet()
+        data = packet.get_all_data()
+
+        self.protocol_version = data[i]
+        i += 1
+
+        server_end = data.find(b"\0", i)
+        self.server_version = data[i:server_end].decode("latin1")
+        i = server_end + 1
+
+        self.server_thread_id = struct.unpack("<I", data[i : i + 4])
+        i += 4
+
+        self.salt = data[i : i + 8]
+        i += 9  # 8 + 1(filler)
+
+        self.server_capabilities = struct.unpack("<H", data[i : i + 2])[0]
+        i += 2
+
+        if len(data) >= i + 6:
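+            # server language / collation id, status flags, upper half of the
+            # capability flags and the advertised length of the auth plugin data.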
+            lang, stat, cap_h, salt_len = struct.unpack("<BHHB", data[i : i + 6])
+            i += 6
+            # TODO: deprecate server_language and server_charset.
+            # mysqlclient-python doesn't provide it.
+            self.server_language = lang
+            try:
+                self.server_charset = charset_by_id(lang).name
+            except KeyError:
+                # unknown collation
+                self.server_charset = None
+
+            self.server_status = stat
+            if DEBUG:
+                print("server_status: %x" % stat)
+
+            self.server_capabilities |= cap_h << 16
+            if DEBUG:
+                print("salt_len:", salt_len)
+            salt_len = max(12, salt_len - 9)
+
+        # reserved
+        i += 10
+
+        if len(data) >= i + salt_len:
+            # salt_len includes auth_plugin_data_part_1 and filler
+            self.salt += data[i : i + salt_len]
+            i += salt_len
+
+        i += 1
+        # AUTH PLUGIN NAME may appear here.
+        if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i:
+            # Due to Bug#59453 the auth-plugin-name is missing the terminating
+            # NUL-char in versions prior to 5.5.10 and 5.6.2.
+            # ref: https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::Handshake
+            # We don't rely on version checks here because MariaDB has fixed
+            # the bug but still reports versions earlier than those two.
+            server_end = data.find(b"\0", i)
+            if server_end < 0:  # pragma: no cover - very specific upstream bug
+                # no terminating \0 found and this is the last field, so take the rest
+                self._auth_plugin_name = data[i:].decode("utf-8")
+            else:
+                self._auth_plugin_name = data[i:server_end].decode("utf-8")
+
+    def get_server_info(self):
+        return self.server_version
+
+    Warning = err.Warning
+    Error = err.Error
+    InterfaceError = err.InterfaceError
+    DatabaseError = err.DatabaseError
+    DataError = err.DataError
+    OperationalError = err.OperationalError
+    IntegrityError = err.IntegrityError
+    InternalError = err.InternalError
+    ProgrammingError = err.ProgrammingError
+    NotSupportedError = err.NotSupportedError
+
+
+class MySQLResult:
+    def __init__(self, connection):
+        """
+        :type connection: Connection
+        """
+        self.connection = connection
+        self.affected_rows = None
+        self.insert_id = None
+        self.server_status = None
+        self.warning_count = 0
+        self.message = None
+        self.field_count = 0
+        self.description = None
+        self.rows = None
+        self.has_next = None
+        self.unbuffered_active = False
+
+    def __del__(self):
+        if self.unbuffered_active:
+            self._finish_unbuffered_query()
+
+    def read(self):
+        try:
+            first_packet = self.connection._read_packet()
+
+            if first_packet.is_ok_packet():
+                self._read_ok_packet(first_packet)
+            elif first_packet.is_load_local_packet():
+                self._read_load_local_packet(first_packet)
+            else:
+                self._read_result_packet(first_packet)
+        finally:
+            self.connection = None
+
+    def init_unbuffered_query(self):
+        """
+        :raise OperationalError: If the connection to the MySQL server is lost.
+        :raise InternalError:
+        """
+        self.unbuffered_active = True
+        first_packet = self.connection._read_packet()
+
+        if first_packet.is_ok_packet():
+            self._read_ok_packet(first_packet)
+            self.unbuffered_active = False
+            self.connection = None
+        elif first_packet.is_load_local_packet():
+            self._read_load_local_packet(first_packet)
+            self.unbuffered_active = False
+            self.connection = None
+        else:
+            self.field_count = first_packet.read_length_encoded_integer()
+            self._get_descriptions()
+
+            # Apparently, MySQLdb picks this number because it's the maximum
+            # value of a 64bit unsigned integer. Since we're emulating MySQLdb,
+            # we set it to this instead of None, which would be preferred.
+            self.affected_rows = 18446744073709551615
+
+    def _read_ok_packet(self, first_packet):
+        ok_packet = OKPacketWrapper(first_packet)
+        self.affected_rows = ok_packet.affected_rows
+        self.insert_id = ok_packet.insert_id
+        self.server_status = ok_packet.server_status
+        self.warning_count = ok_packet.warning_count
+        self.message = ok_packet.message
+        self.has_next = ok_packet.has_next
+
+    def _read_load_local_packet(self, first_packet):
+        if not self.connection._local_infile:
+            raise RuntimeError(
+                "**WARN**: Received LOAD_LOCAL packet but local_infile option is false."
+            )
+        load_packet = LoadLocalPacketWrapper(first_packet)
+        sender = LoadLocalFile(load_packet.filename, self.connection)
+        try:
+            sender.send_data()
+        except:
+            self.connection._read_packet()  # skip ok packet
+            raise
+
+        ok_packet = self.connection._read_packet()
+        if (
+            not ok_packet.is_ok_packet()
+        ):  # pragma: no cover - upstream induced protocol error
+            raise err.OperationalError(2014, "Commands Out of Sync")
+        self._read_ok_packet(ok_packet)
+
+    def _check_packet_is_eof(self, packet):
+        if not packet.is_eof_packet():
+            return False
+        # TODO: Support CLIENT.DEPRECATE_EOF
+        # 1) Add DEPRECATE_EOF to CAPABILITIES
+        # 2) Mask CAPABILITIES with server_capabilities
+        # 3) if server_capabilities & CLIENT.DEPRECATE_EOF: use OKPacketWrapper instead of EOFPacketWrapper
+        wp = EOFPacketWrapper(packet)
+        self.warning_count = wp.warning_count
+        self.has_next = wp.has_next
+        return True
+
+    def _read_result_packet(self, first_packet):
+        self.field_count = first_packet.read_length_encoded_integer()
+        self._get_descriptions()
+        self._read_rowdata_packet()
+
+    def _read_rowdata_packet_unbuffered(self):
+        # Check if in an active query
+        if not self.unbuffered_active:
+            return
+
+        # EOF
+        packet = self.connection._read_packet()
+        if self._check_packet_is_eof(packet):
+            self.unbuffered_active = False
+            self.connection = None
+            self.rows = None
+            return
+
+        row = self._read_row_from_packet(packet)
+        self.affected_rows = 1
+        self.rows = (row,)  # rows should be a tuple of rows for MySQL-python compatibility.
+        return row
+
+    def _finish_unbuffered_query(self):
+        # After much reading on the MySQL protocol, it appears that there is,
+        # in fact, no way to stop MySQL from sending all the data after
+        # executing a query, so we just spin, and wait for an EOF packet.
+        while self.unbuffered_active:
+            packet = self.connection._read_packet()
+            if self._check_packet_is_eof(packet):
+                self.unbuffered_active = False
+                self.connection = None  # release reference to kill cyclic reference.
+
+    def _read_rowdata_packet(self):
+        """Read a rowdata packet for each data row in the result set."""
+        rows = []
+        while True:
+            packet = self.connection._read_packet()
+            if self._check_packet_is_eof(packet):
+                self.connection = None  # release reference to kill cyclic reference.
+                break
+            rows.append(self._read_row_from_packet(packet))
+
+        self.affected_rows = len(rows)
+        self.rows = tuple(rows)
+
+    def _read_row_from_packet(self, packet):
+        row = []
+        for encoding, converter in self.converters:
+            try:
+                data = packet.read_length_coded_string()
+            except IndexError:
+                # No more columns in this row
+                # See https://github.com/PyMySQL/PyMySQL/pull/434
+                break
+            if data is not None:
+                if encoding is not None:
+                    data = data.decode(encoding)
+                if DEBUG:
+                    print("DEBUG: DATA = ", data)
+                if converter is not None:
+                    data = converter(data)
+            row.append(data)
+        return tuple(row)
+
+    def _get_descriptions(self):
+        """Read a column descriptor packet for each column in the result."""
+        self.fields = []
+        self.converters = []
+        use_unicode = self.connection.use_unicode
+        conn_encoding = self.connection.encoding
+        description = []
+
+        for i in range(self.field_count):
+            field = self.connection._read_packet(FieldDescriptorPacket)
+            self.fields.append(field)
+            description.append(field.description())
+            field_type = field.type_code
+            if use_unicode:
+                if field_type == FIELD_TYPE.JSON:
+                    # When SELECT from JSON column: charset = binary
+                    # When SELECT CAST(... AS JSON): charset = connection encoding
+                    # This behavior is different from TEXT / BLOB.
+                    # We should decode the result using the connection encoding regardless of charsetnr.
+                    # See https://github.com/PyMySQL/PyMySQL/issues/488
+                    encoding = conn_encoding  # SELECT CAST(... AS JSON)
+                elif field_type in TEXT_TYPES:
+                    if field.charsetnr == 63:  # binary
+                        # TEXT columns with charset=binary are really BINARY types.
+                        encoding = None
+                    else:
+                        encoding = conn_encoding
+                else:
+                    # Integers, dates and times, and other basic data are encoded in ascii
+                    encoding = "ascii"
+            else:
+                encoding = None
+            converter = self.connection.decoders.get(field_type)
+            if converter is converters.through:
+                converter = None
+            if DEBUG:
+                print(f"DEBUG: field={field}, converter={converter}")
+            self.converters.append((encoding, converter))
+
+        eof_packet = self.connection._read_packet()
+        assert eof_packet.is_eof_packet(), "Protocol error, expecting EOF"
+        self.description = tuple(description)
+
+
+class LoadLocalFile:
+    def __init__(self, filename, connection):
+        self.filename = filename
+        self.connection = connection
+
+    def send_data(self):
+        """Send data packets from the local file to the server"""
+        if not self.connection._sock:
+            raise err.InterfaceError(0, "")
+        conn = self.connection
+
+        try:
+            with open(self.filename, "rb") as open_file:
+                packet_size = min(
+                    conn.max_allowed_packet, 16 * 1024
+                )  # 16KB is efficient enough
+                while True:
+                    chunk = open_file.read(packet_size)
+                    if not chunk:
+                        break
+                    conn.write_packet(chunk)
+        except IOError:
+            raise err.OperationalError(1017, f"Can't find file '{self.filename}'")
+        finally:
+            # send the empty packet to signify we are done sending data
+            conn.write_packet(b"")

+ 38 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/CLIENT.py

@@ -0,0 +1,38 @@
+# https://dev.mysql.com/doc/internals/en/capability-flags.html#packet-Protocol::CapabilityFlags
+LONG_PASSWORD = 1
+FOUND_ROWS = 1 << 1
+LONG_FLAG = 1 << 2
+CONNECT_WITH_DB = 1 << 3
+NO_SCHEMA = 1 << 4
+COMPRESS = 1 << 5
+ODBC = 1 << 6
+LOCAL_FILES = 1 << 7
+IGNORE_SPACE = 1 << 8
+PROTOCOL_41 = 1 << 9
+INTERACTIVE = 1 << 10
+SSL = 1 << 11
+IGNORE_SIGPIPE = 1 << 12
+TRANSACTIONS = 1 << 13
+SECURE_CONNECTION = 1 << 15
+MULTI_STATEMENTS = 1 << 16
+MULTI_RESULTS = 1 << 17
+PS_MULTI_RESULTS = 1 << 18
+PLUGIN_AUTH = 1 << 19
+CONNECT_ATTRS = 1 << 20
+PLUGIN_AUTH_LENENC_CLIENT_DATA = 1 << 21
+CAPABILITIES = (
+    LONG_PASSWORD
+    | LONG_FLAG
+    | PROTOCOL_41
+    | TRANSACTIONS
+    | SECURE_CONNECTION
+    | MULTI_RESULTS
+    | PLUGIN_AUTH
+    | PLUGIN_AUTH_LENENC_CLIENT_DATA
+    | CONNECT_ATTRS
+)
+
+# Not done yet
+HANDLE_EXPIRED_PASSWORDS = 1 << 22
+SESSION_TRACK = 1 << 23
+DEPRECATE_EOF = 1 << 24

+ 32 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/COMMAND.py

@@ -0,0 +1,32 @@
+COM_SLEEP = 0x00
+COM_QUIT = 0x01
+COM_INIT_DB = 0x02
+COM_QUERY = 0x03
+COM_FIELD_LIST = 0x04
+COM_CREATE_DB = 0x05
+COM_DROP_DB = 0x06
+COM_REFRESH = 0x07
+COM_SHUTDOWN = 0x08
+COM_STATISTICS = 0x09
+COM_PROCESS_INFO = 0x0A
+COM_CONNECT = 0x0B
+COM_PROCESS_KILL = 0x0C
+COM_DEBUG = 0x0D
+COM_PING = 0x0E
+COM_TIME = 0x0F
+COM_DELAYED_INSERT = 0x10
+COM_CHANGE_USER = 0x11
+COM_BINLOG_DUMP = 0x12
+COM_TABLE_DUMP = 0x13
+COM_CONNECT_OUT = 0x14
+COM_REGISTER_SLAVE = 0x15
+COM_STMT_PREPARE = 0x16
+COM_STMT_EXECUTE = 0x17
+COM_STMT_SEND_LONG_DATA = 0x18
+COM_STMT_CLOSE = 0x19
+COM_STMT_RESET = 0x1A
+COM_SET_OPTION = 0x1B
+COM_STMT_FETCH = 0x1C
+COM_DAEMON = 0x1D
+COM_BINLOG_DUMP_GTID = 0x1E
+COM_END = 0x1F

+ 68 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/CR.py

@@ -0,0 +1,68 @@
+# flake8: noqa
+# errmsg.h
+CR_ERROR_FIRST = 2000
+CR_UNKNOWN_ERROR = 2000
+CR_SOCKET_CREATE_ERROR = 2001
+CR_CONNECTION_ERROR = 2002
+CR_CONN_HOST_ERROR = 2003
+CR_IPSOCK_ERROR = 2004
+CR_UNKNOWN_HOST = 2005
+CR_SERVER_GONE_ERROR = 2006
+CR_VERSION_ERROR = 2007
+CR_OUT_OF_MEMORY = 2008
+CR_WRONG_HOST_INFO = 2009
+CR_LOCALHOST_CONNECTION = 2010
+CR_TCP_CONNECTION = 2011
+CR_SERVER_HANDSHAKE_ERR = 2012
+CR_SERVER_LOST = 2013
+CR_COMMANDS_OUT_OF_SYNC = 2014
+CR_NAMEDPIPE_CONNECTION = 2015
+CR_NAMEDPIPEWAIT_ERROR = 2016
+CR_NAMEDPIPEOPEN_ERROR = 2017
+CR_NAMEDPIPESETSTATE_ERROR = 2018
+CR_CANT_READ_CHARSET = 2019
+CR_NET_PACKET_TOO_LARGE = 2020
+CR_EMBEDDED_CONNECTION = 2021
+CR_PROBE_SLAVE_STATUS = 2022
+CR_PROBE_SLAVE_HOSTS = 2023
+CR_PROBE_SLAVE_CONNECT = 2024
+CR_PROBE_MASTER_CONNECT = 2025
+CR_SSL_CONNECTION_ERROR = 2026
+CR_MALFORMED_PACKET = 2027
+CR_WRONG_LICENSE = 2028
+
+CR_NULL_POINTER = 2029
+CR_NO_PREPARE_STMT = 2030
+CR_PARAMS_NOT_BOUND = 2031
+CR_DATA_TRUNCATED = 2032
+CR_NO_PARAMETERS_EXISTS = 2033
+CR_INVALID_PARAMETER_NO = 2034
+CR_INVALID_BUFFER_USE = 2035
+CR_UNSUPPORTED_PARAM_TYPE = 2036
+
+CR_SHARED_MEMORY_CONNECTION = 2037
+CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
+CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
+CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
+CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
+CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042
+CR_SHARED_MEMORY_MAP_ERROR = 2043
+CR_SHARED_MEMORY_EVENT_ERROR = 2044
+CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
+CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046
+CR_CONN_UNKNOW_PROTOCOL = 2047
+CR_INVALID_CONN_HANDLE = 2048
+CR_SECURE_AUTH = 2049
+CR_FETCH_CANCELED = 2050
+CR_NO_DATA = 2051
+CR_NO_STMT_METADATA = 2052
+CR_NO_RESULT_SET = 2053
+CR_NOT_IMPLEMENTED = 2054
+CR_SERVER_LOST_EXTENDED = 2055
+CR_STMT_CLOSED = 2056
+CR_NEW_STMT_METADATA = 2057
+CR_ALREADY_CONNECTED = 2058
+CR_AUTH_PLUGIN_CANNOT_LOAD = 2059
+CR_DUPLICATE_CONNECTION_ATTR = 2060
+CR_AUTH_PLUGIN_ERR = 2061
+CR_ERROR_LAST = 2061

+ 474 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/ER.py

@@ -0,0 +1,474 @@
+ERROR_FIRST = 1000
+HASHCHK = 1000
+NISAMCHK = 1001
+NO = 1002
+YES = 1003
+CANT_CREATE_FILE = 1004
+CANT_CREATE_TABLE = 1005
+CANT_CREATE_DB = 1006
+DB_CREATE_EXISTS = 1007
+DB_DROP_EXISTS = 1008
+DB_DROP_DELETE = 1009
+DB_DROP_RMDIR = 1010
+CANT_DELETE_FILE = 1011
+CANT_FIND_SYSTEM_REC = 1012
+CANT_GET_STAT = 1013
+CANT_GET_WD = 1014
+CANT_LOCK = 1015
+CANT_OPEN_FILE = 1016
+FILE_NOT_FOUND = 1017
+CANT_READ_DIR = 1018
+CANT_SET_WD = 1019
+CHECKREAD = 1020
+DISK_FULL = 1021
+DUP_KEY = 1022
+ERROR_ON_CLOSE = 1023
+ERROR_ON_READ = 1024
+ERROR_ON_RENAME = 1025
+ERROR_ON_WRITE = 1026
+FILE_USED = 1027
+FILSORT_ABORT = 1028
+FORM_NOT_FOUND = 1029
+GET_ERRNO = 1030
+ILLEGAL_HA = 1031
+KEY_NOT_FOUND = 1032
+NOT_FORM_FILE = 1033
+NOT_KEYFILE = 1034
+OLD_KEYFILE = 1035
+OPEN_AS_READONLY = 1036
+OUTOFMEMORY = 1037
+OUT_OF_SORTMEMORY = 1038
+UNEXPECTED_EOF = 1039
+CON_COUNT_ERROR = 1040
+OUT_OF_RESOURCES = 1041
+BAD_HOST_ERROR = 1042
+HANDSHAKE_ERROR = 1043
+DBACCESS_DENIED_ERROR = 1044
+ACCESS_DENIED_ERROR = 1045
+NO_DB_ERROR = 1046
+UNKNOWN_COM_ERROR = 1047
+BAD_NULL_ERROR = 1048
+BAD_DB_ERROR = 1049
+TABLE_EXISTS_ERROR = 1050
+BAD_TABLE_ERROR = 1051
+NON_UNIQ_ERROR = 1052
+SERVER_SHUTDOWN = 1053
+BAD_FIELD_ERROR = 1054
+WRONG_FIELD_WITH_GROUP = 1055
+WRONG_GROUP_FIELD = 1056
+WRONG_SUM_SELECT = 1057
+WRONG_VALUE_COUNT = 1058
+TOO_LONG_IDENT = 1059
+DUP_FIELDNAME = 1060
+DUP_KEYNAME = 1061
+DUP_ENTRY = 1062
+WRONG_FIELD_SPEC = 1063
+PARSE_ERROR = 1064
+EMPTY_QUERY = 1065
+NONUNIQ_TABLE = 1066
+INVALID_DEFAULT = 1067
+MULTIPLE_PRI_KEY = 1068
+TOO_MANY_KEYS = 1069
+TOO_MANY_KEY_PARTS = 1070
+TOO_LONG_KEY = 1071
+KEY_COLUMN_DOES_NOT_EXITS = 1072
+BLOB_USED_AS_KEY = 1073
+TOO_BIG_FIELDLENGTH = 1074
+WRONG_AUTO_KEY = 1075
+READY = 1076
+NORMAL_SHUTDOWN = 1077
+GOT_SIGNAL = 1078
+SHUTDOWN_COMPLETE = 1079
+FORCING_CLOSE = 1080
+IPSOCK_ERROR = 1081
+NO_SUCH_INDEX = 1082
+WRONG_FIELD_TERMINATORS = 1083
+BLOBS_AND_NO_TERMINATED = 1084
+TEXTFILE_NOT_READABLE = 1085
+FILE_EXISTS_ERROR = 1086
+LOAD_INFO = 1087
+ALTER_INFO = 1088
+WRONG_SUB_KEY = 1089
+CANT_REMOVE_ALL_FIELDS = 1090
+CANT_DROP_FIELD_OR_KEY = 1091
+INSERT_INFO = 1092
+UPDATE_TABLE_USED = 1093
+NO_SUCH_THREAD = 1094
+KILL_DENIED_ERROR = 1095
+NO_TABLES_USED = 1096
+TOO_BIG_SET = 1097
+NO_UNIQUE_LOGFILE = 1098
+TABLE_NOT_LOCKED_FOR_WRITE = 1099
+TABLE_NOT_LOCKED = 1100
+BLOB_CANT_HAVE_DEFAULT = 1101
+WRONG_DB_NAME = 1102
+WRONG_TABLE_NAME = 1103
+TOO_BIG_SELECT = 1104
+UNKNOWN_ERROR = 1105
+UNKNOWN_PROCEDURE = 1106
+WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
+WRONG_PARAMETERS_TO_PROCEDURE = 1108
+UNKNOWN_TABLE = 1109
+FIELD_SPECIFIED_TWICE = 1110
+INVALID_GROUP_FUNC_USE = 1111
+UNSUPPORTED_EXTENSION = 1112
+TABLE_MUST_HAVE_COLUMNS = 1113
+RECORD_FILE_FULL = 1114
+UNKNOWN_CHARACTER_SET = 1115
+TOO_MANY_TABLES = 1116
+TOO_MANY_FIELDS = 1117
+TOO_BIG_ROWSIZE = 1118
+STACK_OVERRUN = 1119
+WRONG_OUTER_JOIN = 1120
+NULL_COLUMN_IN_INDEX = 1121
+CANT_FIND_UDF = 1122
+CANT_INITIALIZE_UDF = 1123
+UDF_NO_PATHS = 1124
+UDF_EXISTS = 1125
+CANT_OPEN_LIBRARY = 1126
+CANT_FIND_DL_ENTRY = 1127
+FUNCTION_NOT_DEFINED = 1128
+HOST_IS_BLOCKED = 1129
+HOST_NOT_PRIVILEGED = 1130
+PASSWORD_ANONYMOUS_USER = 1131
+PASSWORD_NOT_ALLOWED = 1132
+PASSWORD_NO_MATCH = 1133
+UPDATE_INFO = 1134
+CANT_CREATE_THREAD = 1135
+WRONG_VALUE_COUNT_ON_ROW = 1136
+CANT_REOPEN_TABLE = 1137
+INVALID_USE_OF_NULL = 1138
+REGEXP_ERROR = 1139
+MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
+NONEXISTING_GRANT = 1141
+TABLEACCESS_DENIED_ERROR = 1142
+COLUMNACCESS_DENIED_ERROR = 1143
+ILLEGAL_GRANT_FOR_TABLE = 1144
+GRANT_WRONG_HOST_OR_USER = 1145
+NO_SUCH_TABLE = 1146
+NONEXISTING_TABLE_GRANT = 1147
+NOT_ALLOWED_COMMAND = 1148
+SYNTAX_ERROR = 1149
+DELAYED_CANT_CHANGE_LOCK = 1150
+TOO_MANY_DELAYED_THREADS = 1151
+ABORTING_CONNECTION = 1152
+NET_PACKET_TOO_LARGE = 1153
+NET_READ_ERROR_FROM_PIPE = 1154
+NET_FCNTL_ERROR = 1155
+NET_PACKETS_OUT_OF_ORDER = 1156
+NET_UNCOMPRESS_ERROR = 1157
+NET_READ_ERROR = 1158
+NET_READ_INTERRUPTED = 1159
+NET_ERROR_ON_WRITE = 1160
+NET_WRITE_INTERRUPTED = 1161
+TOO_LONG_STRING = 1162
+TABLE_CANT_HANDLE_BLOB = 1163
+TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
+DELAYED_INSERT_TABLE_LOCKED = 1165
+WRONG_COLUMN_NAME = 1166
+WRONG_KEY_COLUMN = 1167
+WRONG_MRG_TABLE = 1168
+DUP_UNIQUE = 1169
+BLOB_KEY_WITHOUT_LENGTH = 1170
+PRIMARY_CANT_HAVE_NULL = 1171
+TOO_MANY_ROWS = 1172
+REQUIRES_PRIMARY_KEY = 1173
+NO_RAID_COMPILED = 1174
+UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
+KEY_DOES_NOT_EXITS = 1176
+CHECK_NO_SUCH_TABLE = 1177
+CHECK_NOT_IMPLEMENTED = 1178
+CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
+ERROR_DURING_COMMIT = 1180
+ERROR_DURING_ROLLBACK = 1181
+ERROR_DURING_FLUSH_LOGS = 1182
+ERROR_DURING_CHECKPOINT = 1183
+NEW_ABORTING_CONNECTION = 1184
+DUMP_NOT_IMPLEMENTED = 1185
+FLUSH_MASTER_BINLOG_CLOSED = 1186
+INDEX_REBUILD = 1187
+MASTER = 1188
+MASTER_NET_READ = 1189
+MASTER_NET_WRITE = 1190
+FT_MATCHING_KEY_NOT_FOUND = 1191
+LOCK_OR_ACTIVE_TRANSACTION = 1192
+UNKNOWN_SYSTEM_VARIABLE = 1193
+CRASHED_ON_USAGE = 1194
+CRASHED_ON_REPAIR = 1195
+WARNING_NOT_COMPLETE_ROLLBACK = 1196
+TRANS_CACHE_FULL = 1197
+SLAVE_MUST_STOP = 1198
+SLAVE_NOT_RUNNING = 1199
+BAD_SLAVE = 1200
+MASTER_INFO = 1201
+SLAVE_THREAD = 1202
+TOO_MANY_USER_CONNECTIONS = 1203
+SET_CONSTANTS_ONLY = 1204
+LOCK_WAIT_TIMEOUT = 1205
+LOCK_TABLE_FULL = 1206
+READ_ONLY_TRANSACTION = 1207
+DROP_DB_WITH_READ_LOCK = 1208
+CREATE_DB_WITH_READ_LOCK = 1209
+WRONG_ARGUMENTS = 1210
+NO_PERMISSION_TO_CREATE_USER = 1211
+UNION_TABLES_IN_DIFFERENT_DIR = 1212
+LOCK_DEADLOCK = 1213
+TABLE_CANT_HANDLE_FT = 1214
+CANNOT_ADD_FOREIGN = 1215
+NO_REFERENCED_ROW = 1216
+ROW_IS_REFERENCED = 1217
+CONNECT_TO_MASTER = 1218
+QUERY_ON_MASTER = 1219
+ERROR_WHEN_EXECUTING_COMMAND = 1220
+WRONG_USAGE = 1221
+WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
+CANT_UPDATE_WITH_READLOCK = 1223
+MIXING_NOT_ALLOWED = 1224
+DUP_ARGUMENT = 1225
+USER_LIMIT_REACHED = 1226
+SPECIFIC_ACCESS_DENIED_ERROR = 1227
+LOCAL_VARIABLE = 1228
+GLOBAL_VARIABLE = 1229
+NO_DEFAULT = 1230
+WRONG_VALUE_FOR_VAR = 1231
+WRONG_TYPE_FOR_VAR = 1232
+VAR_CANT_BE_READ = 1233
+CANT_USE_OPTION_HERE = 1234
+NOT_SUPPORTED_YET = 1235
+MASTER_FATAL_ERROR_READING_BINLOG = 1236
+SLAVE_IGNORED_TABLE = 1237
+INCORRECT_GLOBAL_LOCAL_VAR = 1238
+WRONG_FK_DEF = 1239
+KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
+OPERAND_COLUMNS = 1241
+SUBQUERY_NO_1_ROW = 1242
+UNKNOWN_STMT_HANDLER = 1243
+CORRUPT_HELP_DB = 1244
+CYCLIC_REFERENCE = 1245
+AUTO_CONVERT = 1246
+ILLEGAL_REFERENCE = 1247
+DERIVED_MUST_HAVE_ALIAS = 1248
+SELECT_REDUCED = 1249
+TABLENAME_NOT_ALLOWED_HERE = 1250
+NOT_SUPPORTED_AUTH_MODE = 1251
+SPATIAL_CANT_HAVE_NULL = 1252
+COLLATION_CHARSET_MISMATCH = 1253
+SLAVE_WAS_RUNNING = 1254
+SLAVE_WAS_NOT_RUNNING = 1255
+TOO_BIG_FOR_UNCOMPRESS = 1256
+ZLIB_Z_MEM_ERROR = 1257
+ZLIB_Z_BUF_ERROR = 1258
+ZLIB_Z_DATA_ERROR = 1259
+CUT_VALUE_GROUP_CONCAT = 1260
+WARN_TOO_FEW_RECORDS = 1261
+WARN_TOO_MANY_RECORDS = 1262
+WARN_NULL_TO_NOTNULL = 1263
+WARN_DATA_OUT_OF_RANGE = 1264
+WARN_DATA_TRUNCATED = 1265
+WARN_USING_OTHER_HANDLER = 1266
+CANT_AGGREGATE_2COLLATIONS = 1267
+DROP_USER = 1268
+REVOKE_GRANTS = 1269
+CANT_AGGREGATE_3COLLATIONS = 1270
+CANT_AGGREGATE_NCOLLATIONS = 1271
+VARIABLE_IS_NOT_STRUCT = 1272
+UNKNOWN_COLLATION = 1273
+SLAVE_IGNORED_SSL_PARAMS = 1274
+SERVER_IS_IN_SECURE_AUTH_MODE = 1275
+WARN_FIELD_RESOLVED = 1276
+BAD_SLAVE_UNTIL_COND = 1277
+MISSING_SKIP_SLAVE = 1278
+UNTIL_COND_IGNORED = 1279
+WRONG_NAME_FOR_INDEX = 1280
+WRONG_NAME_FOR_CATALOG = 1281
+WARN_QC_RESIZE = 1282
+BAD_FT_COLUMN = 1283
+UNKNOWN_KEY_CACHE = 1284
+WARN_HOSTNAME_WONT_WORK = 1285
+UNKNOWN_STORAGE_ENGINE = 1286
+WARN_DEPRECATED_SYNTAX = 1287
+NON_UPDATABLE_TABLE = 1288
+FEATURE_DISABLED = 1289
+OPTION_PREVENTS_STATEMENT = 1290
+DUPLICATED_VALUE_IN_TYPE = 1291
+TRUNCATED_WRONG_VALUE = 1292
+TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293
+INVALID_ON_UPDATE = 1294
+UNSUPPORTED_PS = 1295
+GET_ERRMSG = 1296
+GET_TEMPORARY_ERRMSG = 1297
+UNKNOWN_TIME_ZONE = 1298
+WARN_INVALID_TIMESTAMP = 1299
+INVALID_CHARACTER_STRING = 1300
+WARN_ALLOWED_PACKET_OVERFLOWED = 1301
+CONFLICTING_DECLARATIONS = 1302
+SP_NO_RECURSIVE_CREATE = 1303
+SP_ALREADY_EXISTS = 1304
+SP_DOES_NOT_EXIST = 1305
+SP_DROP_FAILED = 1306
+SP_STORE_FAILED = 1307
+SP_LILABEL_MISMATCH = 1308
+SP_LABEL_REDEFINE = 1309
+SP_LABEL_MISMATCH = 1310
+SP_UNINIT_VAR = 1311
+SP_BADSELECT = 1312
+SP_BADRETURN = 1313
+SP_BADSTATEMENT = 1314
+UPDATE_LOG_DEPRECATED_IGNORED = 1315
+UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
+QUERY_INTERRUPTED = 1317
+SP_WRONG_NO_OF_ARGS = 1318
+SP_COND_MISMATCH = 1319
+SP_NORETURN = 1320
+SP_NORETURNEND = 1321
+SP_BAD_CURSOR_QUERY = 1322
+SP_BAD_CURSOR_SELECT = 1323
+SP_CURSOR_MISMATCH = 1324
+SP_CURSOR_ALREADY_OPEN = 1325
+SP_CURSOR_NOT_OPEN = 1326
+SP_UNDECLARED_VAR = 1327
+SP_WRONG_NO_OF_FETCH_ARGS = 1328
+SP_FETCH_NO_DATA = 1329
+SP_DUP_PARAM = 1330
+SP_DUP_VAR = 1331
+SP_DUP_COND = 1332
+SP_DUP_CURS = 1333
+SP_CANT_ALTER = 1334
+SP_SUBSELECT_NYI = 1335
+STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
+SP_VARCOND_AFTER_CURSHNDLR = 1337
+SP_CURSOR_AFTER_HANDLER = 1338
+SP_CASE_NOT_FOUND = 1339
+FPARSER_TOO_BIG_FILE = 1340
+FPARSER_BAD_HEADER = 1341
+FPARSER_EOF_IN_COMMENT = 1342
+FPARSER_ERROR_IN_PARAMETER = 1343
+FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
+VIEW_NO_EXPLAIN = 1345
+FRM_UNKNOWN_TYPE = 1346
+WRONG_OBJECT = 1347
+NONUPDATEABLE_COLUMN = 1348
+VIEW_SELECT_DERIVED = 1349
+VIEW_SELECT_CLAUSE = 1350
+VIEW_SELECT_VARIABLE = 1351
+VIEW_SELECT_TMPTABLE = 1352
+VIEW_WRONG_LIST = 1353
+WARN_VIEW_MERGE = 1354
+WARN_VIEW_WITHOUT_KEY = 1355
+VIEW_INVALID = 1356
+SP_NO_DROP_SP = 1357
+SP_GOTO_IN_HNDLR = 1358
+TRG_ALREADY_EXISTS = 1359
+TRG_DOES_NOT_EXIST = 1360
+TRG_ON_VIEW_OR_TEMP_TABLE = 1361
+TRG_CANT_CHANGE_ROW = 1362
+TRG_NO_SUCH_ROW_IN_TRG = 1363
+NO_DEFAULT_FOR_FIELD = 1364
+DIVISION_BY_ZERO = 1365
+TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
+ILLEGAL_VALUE_FOR_TYPE = 1367
+VIEW_NONUPD_CHECK = 1368
+VIEW_CHECK_FAILED = 1369
+PROCACCESS_DENIED_ERROR = 1370
+RELAY_LOG_FAIL = 1371
+PASSWD_LENGTH = 1372
+UNKNOWN_TARGET_BINLOG = 1373
+IO_ERR_LOG_INDEX_READ = 1374
+BINLOG_PURGE_PROHIBITED = 1375
+FSEEK_FAIL = 1376
+BINLOG_PURGE_FATAL_ERR = 1377
+LOG_IN_USE = 1378
+LOG_PURGE_UNKNOWN_ERR = 1379
+RELAY_LOG_INIT = 1380
+NO_BINARY_LOGGING = 1381
+RESERVED_SYNTAX = 1382
+WSAS_FAILED = 1383
+DIFF_GROUPS_PROC = 1384
+NO_GROUP_FOR_PROC = 1385
+ORDER_WITH_PROC = 1386
+LOGGING_PROHIBIT_CHANGING_OF = 1387
+NO_FILE_MAPPING = 1388
+WRONG_MAGIC = 1389
+PS_MANY_PARAM = 1390
+KEY_PART_0 = 1391
+VIEW_CHECKSUM = 1392
+VIEW_MULTIUPDATE = 1393
+VIEW_NO_INSERT_FIELD_LIST = 1394
+VIEW_DELETE_MERGE_VIEW = 1395
+CANNOT_USER = 1396
+XAER_NOTA = 1397
+XAER_INVAL = 1398
+XAER_RMFAIL = 1399
+XAER_OUTSIDE = 1400
+XAER_RMERR = 1401
+XA_RBROLLBACK = 1402
+NONEXISTING_PROC_GRANT = 1403
+PROC_AUTO_GRANT_FAIL = 1404
+PROC_AUTO_REVOKE_FAIL = 1405
+DATA_TOO_LONG = 1406
+SP_BAD_SQLSTATE = 1407
+STARTUP = 1408
+LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
+CANT_CREATE_USER_WITH_GRANT = 1410
+WRONG_VALUE_FOR_TYPE = 1411
+TABLE_DEF_CHANGED = 1412
+SP_DUP_HANDLER = 1413
+SP_NOT_VAR_ARG = 1414
+SP_NO_RETSET = 1415
+CANT_CREATE_GEOMETRY_OBJECT = 1416
+FAILED_ROUTINE_BREAK_BINLOG = 1417
+BINLOG_UNSAFE_ROUTINE = 1418
+BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
+EXEC_STMT_WITH_OPEN_CURSOR = 1420
+STMT_HAS_NO_OPEN_CURSOR = 1421
+COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
+NO_DEFAULT_FOR_VIEW_FIELD = 1423
+SP_NO_RECURSION = 1424
+TOO_BIG_SCALE = 1425
+TOO_BIG_PRECISION = 1426
+M_BIGGER_THAN_D = 1427
+WRONG_LOCK_OF_SYSTEM_TABLE = 1428
+CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
+QUERY_ON_FOREIGN_DATA_SOURCE = 1430
+FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
+FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
+FOREIGN_DATA_STRING_INVALID = 1433
+CANT_CREATE_FEDERATED_TABLE = 1434
+TRG_IN_WRONG_SCHEMA = 1435
+STACK_OVERRUN_NEED_MORE = 1436
+TOO_LONG_BODY = 1437
+WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
+TOO_BIG_DISPLAYWIDTH = 1439
+XAER_DUPID = 1440
+DATETIME_FUNCTION_OVERFLOW = 1441
+CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
+VIEW_PREVENT_UPDATE = 1443
+PS_NO_RECURSION = 1444
+SP_CANT_SET_AUTOCOMMIT = 1445
+MALFORMED_DEFINER = 1446
+VIEW_FRM_NO_USER = 1447
+VIEW_OTHER_USER = 1448
+NO_SUCH_USER = 1449
+FORBID_SCHEMA_CHANGE = 1450
+ROW_IS_REFERENCED_2 = 1451
+NO_REFERENCED_ROW_2 = 1452
+SP_BAD_VAR_SHADOW = 1453
+TRG_NO_DEFINER = 1454
+OLD_FILE_FORMAT = 1455
+SP_RECURSION_LIMIT = 1456
+SP_PROC_TABLE_CORRUPT = 1457
+SP_WRONG_NAME = 1458
+TABLE_NEEDS_UPGRADE = 1459
+SP_NO_AGGREGATE = 1460
+MAX_PREPARED_STMT_COUNT_REACHED = 1461
+VIEW_RECURSIVE = 1462
+NON_GROUPING_FIELD_USED = 1463
+TABLE_CANT_HANDLE_SPKEYS = 1464
+NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
+USERNAME = 1466
+HOSTNAME = 1467
+WRONG_STRING_LENGTH = 1468
+ERROR_LAST = 1468
+
+# https://github.com/PyMySQL/PyMySQL/issues/607
+CONSTRAINT_FAILED = 4025

+ 31 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/FIELD_TYPE.py

@@ -0,0 +1,31 @@
+DECIMAL = 0
+TINY = 1
+SHORT = 2
+LONG = 3
+FLOAT = 4
+DOUBLE = 5
+NULL = 6
+TIMESTAMP = 7
+LONGLONG = 8
+INT24 = 9
+DATE = 10
+TIME = 11
+DATETIME = 12
+YEAR = 13
+NEWDATE = 14
+VARCHAR = 15
+BIT = 16
+JSON = 245
+NEWDECIMAL = 246
+ENUM = 247
+SET = 248
+TINY_BLOB = 249
+MEDIUM_BLOB = 250
+LONG_BLOB = 251
+BLOB = 252
+VAR_STRING = 253
+STRING = 254
+GEOMETRY = 255
+
+CHAR = TINY
+INTERVAL = ENUM

+ 15 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/FLAG.py

@@ -0,0 +1,15 @@
+NOT_NULL = 1
+PRI_KEY = 2
+UNIQUE_KEY = 4
+MULTIPLE_KEY = 8
+BLOB = 16
+UNSIGNED = 32
+ZEROFILL = 64
+BINARY = 128
+ENUM = 256
+AUTO_INCREMENT = 512
+TIMESTAMP = 1024
+SET = 2048
+PART_KEY = 16384
+GROUP = 32767
+UNIQUE = 65536

+ 10 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/SERVER_STATUS.py

@@ -0,0 +1,10 @@
+SERVER_STATUS_IN_TRANS = 1
+SERVER_STATUS_AUTOCOMMIT = 2
+SERVER_MORE_RESULTS_EXISTS = 8
+SERVER_QUERY_NO_GOOD_INDEX_USED = 16
+SERVER_QUERY_NO_INDEX_USED = 32
+SERVER_STATUS_CURSOR_EXISTS = 64
+SERVER_STATUS_LAST_ROW_SENT = 128
+SERVER_STATUS_DB_DROPPED = 256
+SERVER_STATUS_NO_BACKSLASH_ESCAPES = 512
+SERVER_STATUS_METADATA_CHANGED = 1024

+ 0 - 0
ambt-preden-monthlymigration-dev/package/pymysql/constants/__init__.py


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/CLIENT.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/COMMAND.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/CR.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/ER.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/FIELD_TYPE.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/FLAG.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/SERVER_STATUS.cpython-38.pyc


BIN
ambt-preden-monthlymigration-dev/package/pymysql/constants/__pycache__/__init__.cpython-38.pyc


+ 364 - 0
ambt-preden-monthlymigration-dev/package/pymysql/converters.py

@@ -0,0 +1,364 @@
+import datetime
+from decimal import Decimal
+import re
+import time
+
+from .err import ProgrammingError
+from .constants import FIELD_TYPE
+
+
+def escape_item(val, charset, mapping=None):
+    if mapping is None:
+        mapping = encoders
+    encoder = mapping.get(type(val))
+
+    # Fallback to default when no encoder found
+    if not encoder:
+        try:
+            encoder = mapping[str]
+        except KeyError:
+            raise TypeError("no default type converter defined")
+
+    if encoder in (escape_dict, escape_sequence):
+        val = encoder(val, charset, mapping)
+    else:
+        val = encoder(val, mapping)
+    return val
+
+
+def escape_dict(val, charset, mapping=None):
+    n = {}
+    for k, v in val.items():
+        quoted = escape_item(v, charset, mapping)
+        n[k] = quoted
+    return n
+
+
+def escape_sequence(val, charset, mapping=None):
+    n = []
+    for item in val:
+        quoted = escape_item(item, charset, mapping)
+        n.append(quoted)
+    return "(" + ",".join(n) + ")"
+
+
+def escape_set(val, charset, mapping=None):
+    return ",".join([escape_item(x, charset, mapping) for x in val])
+
+
+def escape_bool(value, mapping=None):
+    return str(int(value))
+
+
+def escape_int(value, mapping=None):
+    return str(value)
+
+
+def escape_float(value, mapping=None):
+    s = repr(value)
+    if s in ("inf", "nan"):
+        raise ProgrammingError("%s can not be used with MySQL" % s)
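+    # Ensure an exponent is present so MySQL treats the literal as an
+    # approximate (floating point) value rather than an exact DECIMAL.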
+    if "e" not in s:
+        s += "e0"
+    return s
+
+
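+# Translation table used by str.translate() to escape characters that are
+# special inside MySQL string literals.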
+_escape_table = [chr(x) for x in range(128)]
+_escape_table[0] = "\\0"
+_escape_table[ord("\\")] = "\\\\"
+_escape_table[ord("\n")] = "\\n"
+_escape_table[ord("\r")] = "\\r"
+_escape_table[ord("\032")] = "\\Z"
+_escape_table[ord('"')] = '\\"'
+_escape_table[ord("'")] = "\\'"
+
+
+def escape_string(value, mapping=None):
+    """escapes *value* without adding quote.
+
+    Value should be unicode
+    """
+    return value.translate(_escape_table)
+
+
+def escape_bytes_prefixed(value, mapping=None):
+    return "_binary'%s'" % value.decode("ascii", "surrogateescape").translate(
+        _escape_table
+    )
+
+
+def escape_bytes(value, mapping=None):
+    return "'%s'" % value.decode("ascii", "surrogateescape").translate(_escape_table)
+
+
+def escape_str(value, mapping=None):
+    return "'%s'" % escape_string(str(value), mapping)
+
+
+def escape_None(value, mapping=None):
+    return "NULL"
+
+
+def escape_timedelta(obj, mapping=None):
+    seconds = int(obj.seconds) % 60
+    minutes = int(obj.seconds // 60) % 60
+    hours = int(obj.seconds // 3600) % 24 + int(obj.days) * 24
+    if obj.microseconds:
+        fmt = "'{0:02d}:{1:02d}:{2:02d}.{3:06d}'"
+    else:
+        fmt = "'{0:02d}:{1:02d}:{2:02d}'"
+    return fmt.format(hours, minutes, seconds, obj.microseconds)
+
+
+def escape_time(obj, mapping=None):
+    if obj.microsecond:
+        fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+    else:
+        fmt = "'{0.hour:02}:{0.minute:02}:{0.second:02}'"
+    return fmt.format(obj)
+
+
+def escape_datetime(obj, mapping=None):
+    if obj.microsecond:
+        fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}'"
+    else:
+        fmt = "'{0.year:04}-{0.month:02}-{0.day:02} {0.hour:02}:{0.minute:02}:{0.second:02}'"
+    return fmt.format(obj)
+
+
+def escape_date(obj, mapping=None):
+    fmt = "'{0.year:04}-{0.month:02}-{0.day:02}'"
+    return fmt.format(obj)
+
+
+def escape_struct_time(obj, mapping=None):
+    return escape_datetime(datetime.datetime(*obj[:6]))
+
+
+def Decimal2Literal(o, d):
+    return format(o, "f")
+
+
+def _convert_second_fraction(s):
+    if not s:
+        return 0
+    # Pad zeros to ensure the fraction length in microseconds
+    s = s.ljust(6, "0")
+    return int(s[:6])
+
+
+DATETIME_RE = re.compile(
+    r"(\d{1,4})-(\d{1,2})-(\d{1,2})[T ](\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?"
+)
+
+
+def convert_datetime(obj):
+    """Returns a DATETIME or TIMESTAMP column value as a datetime object:
+
+      >>> datetime_or_None('2007-02-25 23:06:20')
+      datetime.datetime(2007, 2, 25, 23, 6, 20)
+      >>> datetime_or_None('2007-02-25T23:06:20')
+      datetime.datetime(2007, 2, 25, 23, 6, 20)
+
+    Illegal values are returned as None:
+
+      >>> datetime_or_None('2007-02-31T23:06:20') is None
+      True
+      >>> datetime_or_None('0000-00-00 00:00:00') is None
+      True
+
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = DATETIME_RE.match(obj)
+    if not m:
+        return convert_date(obj)
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        return datetime.datetime(*[int(x) for x in groups])
+    except ValueError:
+        return convert_date(obj)
+
+
+TIMEDELTA_RE = re.compile(r"(-)?(\d{1,3}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_timedelta(obj):
+    """Returns a TIME column as a timedelta object:
+
+      >>> timedelta_or_None('25:06:17')
+      datetime.timedelta(1, 3977)
+      >>> timedelta_or_None('-25:06:17')
+      datetime.timedelta(-2, 83177)
+
+    Illegal values are returned as None:
+
+      >>> timedelta_or_None('random crap') is None
+      True
+
+    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+    can accept values as (+|-)DD HH:MM:SS. The latter format will not
+    be parsed correctly by this function.
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = TIMEDELTA_RE.match(obj)
+    if not m:
+        return obj
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        negate = -1 if groups[0] else 1
+        hours, minutes, seconds, microseconds = groups[1:]
+
+        tdelta = (
+            datetime.timedelta(
+                hours=int(hours),
+                minutes=int(minutes),
+                seconds=int(seconds),
+                microseconds=int(microseconds),
+            )
+            * negate
+        )
+        return tdelta
+    except ValueError:
+        return obj
+
+
+TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})(?:.(\d{1,6}))?")
+
+
+def convert_time(obj):
+    """Returns a TIME column as a time object:
+
+      >>> time_or_None('15:06:17')
+      datetime.time(15, 6, 17)
+
+    Illegal values are returned as None:
+
+      >>> time_or_None('-25:06:17') is None
+      True
+      >>> time_or_None('random crap') is None
+      True
+
+    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
+    can accept values as (+|-)DD HH:MM:SS. The latter format will not
+    be parsed correctly by this function.
+
+    Also note that MySQL's TIME column corresponds more closely to
+    Python's timedelta and not time. However if you want TIME columns
+    to be treated as time-of-day and not a time offset, then you can
+    use set this function as the converter for FIELD_TYPE.TIME.
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+
+    m = TIME_RE.match(obj)
+    if not m:
+        return obj
+
+    try:
+        groups = list(m.groups())
+        groups[-1] = _convert_second_fraction(groups[-1])
+        hours, minutes, seconds, microseconds = groups
+        return datetime.time(
+            hour=int(hours),
+            minute=int(minutes),
+            second=int(seconds),
+            microsecond=int(microseconds),
+        )
+    except ValueError:
+        return obj
+
+
+def convert_date(obj):
+    """Returns a DATE column as a date object:
+
+      >>> date_or_None('2007-02-26')
+      datetime.date(2007, 2, 26)
+
+    Illegal values are returned as None:
+
+      >>> date_or_None('2007-02-31') is None
+      True
+      >>> date_or_None('0000-00-00') is None
+      True
+
+    """
+    if isinstance(obj, (bytes, bytearray)):
+        obj = obj.decode("ascii")
+    try:
+        return datetime.date(*[int(x) for x in obj.split("-", 2)])
+    except ValueError:
+        return obj
+
+
+def through(x):
+    return x
+
+
+# def convert_bit(b):
+#    b = "\x00" * (8 - len(b)) + b # pad w/ zeroes
+#    return struct.unpack(">Q", b)[0]
+#
+#     the snippet above is right, but MySQLdb doesn't process bits,
+#     so we shouldn't either
+convert_bit = through
+
+
+encoders = {
+    bool: escape_bool,
+    int: escape_int,
+    float: escape_float,
+    str: escape_str,
+    bytes: escape_bytes,
+    tuple: escape_sequence,
+    list: escape_sequence,
+    set: escape_sequence,
+    frozenset: escape_sequence,
+    dict: escape_dict,
+    type(None): escape_None,
+    datetime.date: escape_date,
+    datetime.datetime: escape_datetime,
+    datetime.timedelta: escape_timedelta,
+    datetime.time: escape_time,
+    time.struct_time: escape_struct_time,
+    Decimal: Decimal2Literal,
+}
+
+
+decoders = {
+    FIELD_TYPE.BIT: convert_bit,
+    FIELD_TYPE.TINY: int,
+    FIELD_TYPE.SHORT: int,
+    FIELD_TYPE.LONG: int,
+    FIELD_TYPE.FLOAT: float,
+    FIELD_TYPE.DOUBLE: float,
+    FIELD_TYPE.LONGLONG: int,
+    FIELD_TYPE.INT24: int,
+    FIELD_TYPE.YEAR: int,
+    FIELD_TYPE.TIMESTAMP: convert_datetime,
+    FIELD_TYPE.DATETIME: convert_datetime,
+    FIELD_TYPE.TIME: convert_timedelta,
+    FIELD_TYPE.DATE: convert_date,
+    FIELD_TYPE.BLOB: through,
+    FIELD_TYPE.TINY_BLOB: through,
+    FIELD_TYPE.MEDIUM_BLOB: through,
+    FIELD_TYPE.LONG_BLOB: through,
+    FIELD_TYPE.STRING: through,
+    FIELD_TYPE.VAR_STRING: through,
+    FIELD_TYPE.VARCHAR: through,
+    FIELD_TYPE.DECIMAL: Decimal,
+    FIELD_TYPE.NEWDECIMAL: Decimal,
+}
+
+
+# for MySQLdb compatibility
+conversions = encoders.copy()
+conversions.update(decoders)
+Thing2Literal = escape_str

+ 496 - 0
ambt-preden-monthlymigration-dev/package/pymysql/cursors.py

@@ -0,0 +1,496 @@
+import re
+from . import err
+
+
+#: Regular expression for :meth:`Cursor.executemany`.
+#: executemany only supports simple bulk inserts.
+#: You can use it to load large datasets.
+RE_INSERT_VALUES = re.compile(
+    r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)"
+    + r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))"
+    + r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
+    re.IGNORECASE | re.DOTALL,
+)
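+# Group 1 is the statement up to and including VALUES, group 2 is the
+# "(%s, ...)" placeholder group, and group 3 is an optional trailing
+# "ON DUPLICATE ..." clause.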
+
+
+class Cursor:
+    """
+    This is the object you use to interact with the database.
+
+    Do not create an instance of a Cursor yourself. Call
+    connections.Connection.cursor().
+
+    See `Cursor <https://www.python.org/dev/peps/pep-0249/#cursor-objects>`_ in
+    the specification.
+    """
+
+    #: Max statement size which :meth:`executemany` generates.
+    #:
+    #: The maximum allowed statement size is max_allowed_packet - packet_header_size.
+    #: The default value of max_allowed_packet is 1048576.
+    max_stmt_length = 1024000
+
+    def __init__(self, connection):
+        self.connection = connection
+        self.description = None
+        self.rownumber = 0
+        self.rowcount = -1
+        self.arraysize = 1
+        self._executed = None
+        self._result = None
+        self._rows = None
+
+    def close(self):
+        """
+        Closing a cursor just exhausts all remaining data.
+        """
+        conn = self.connection
+        if conn is None:
+            return
+        try:
+            while self.nextset():
+                pass
+        finally:
+            self.connection = None
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, *exc_info):
+        del exc_info
+        self.close()
+
+    def _get_db(self):
+        if not self.connection:
+            raise err.ProgrammingError("Cursor closed")
+        return self.connection
+
+    def _check_executed(self):
+        if not self._executed:
+            raise err.ProgrammingError("execute() first")
+
+    def _conv_row(self, row):
+        return row
+
+    def setinputsizes(self, *args):
+        """Does nothing, required by DB API."""
+
+    def setoutputsizes(self, *args):
+        """Does nothing, required by DB API."""
+
+    def _nextset(self, unbuffered=False):
+        """Get the next query set"""
+        conn = self._get_db()
+        current_result = self._result
+        if current_result is None or current_result is not conn._result:
+            return None
+        if not current_result.has_next:
+            return None
+        self._result = None
+        self._clear_result()
+        conn.next_result(unbuffered=unbuffered)
+        self._do_get_result()
+        return True
+
+    def nextset(self):
+        return self._nextset(False)
+
+    def _ensure_bytes(self, x, encoding=None):
+        if isinstance(x, str):
+            x = x.encode(encoding)
+        elif isinstance(x, (tuple, list)):
+            x = type(x)(self._ensure_bytes(v, encoding=encoding) for v in x)
+        return x
+
+    def _escape_args(self, args, conn):
+        if isinstance(args, (tuple, list)):
+            return tuple(conn.literal(arg) for arg in args)
+        elif isinstance(args, dict):
+            return {key: conn.literal(val) for (key, val) in args.items()}
+        else:
+            # If it's not a dictionary, try escaping it anyway.
+            # Worst case it will throw a ValueError.
+            return conn.escape(args)
+
+    def mogrify(self, query, args=None):
+        """
+        Returns the exact string that is sent to the database by calling the
+        execute() method.
+
+        This method follows the extension to the DB API 2.0 used by Psycopg.
+        """
+        conn = self._get_db()
+
+        if args is not None:
+            query = query % self._escape_args(args, conn)
+
+        return query
+
+    def execute(self, query, args=None):
+        """Execute a query
+
+        :param str query: Query to execute.
+
+        :param args: parameters used with query. (optional)
+        :type args: tuple, list or dict
+
+        :return: Number of affected rows
+        :rtype: int
+
+        If args is a list or tuple, %s can be used as a placeholder in the query.
+        If args is a dict, %(name)s can be used as a placeholder in the query.
+        """
+        while self.nextset():
+            pass
+
+        query = self.mogrify(query, args)
+
+        result = self._query(query)
+        self._executed = query
+        return result
+
+    def executemany(self, query, args):
+        # type: (str, list) -> int
+        """Run several data against one query
+
+        :param query: query to execute on server
+        :param args:  Sequence of sequences or mappings.  It is used as parameter.
+        :return: Number of rows affected, if any.
+
+        This method improves performance on multiple-row INSERT and
+        REPLACE. Otherwise it is equivalent to looping over args with
+        execute().
+        """
+        if not args:
+            return
+
+        m = RE_INSERT_VALUES.match(query)
+        if m:
+            q_prefix = m.group(1) % ()
+            q_values = m.group(2).rstrip()
+            q_postfix = m.group(3) or ""
+            assert q_values[0] == "(" and q_values[-1] == ")"
+            return self._do_execute_many(
+                q_prefix,
+                q_values,
+                q_postfix,
+                args,
+                self.max_stmt_length,
+                self._get_db().encoding,
+            )
+
+        self.rowcount = sum(self.execute(query, arg) for arg in args)
+        return self.rowcount
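+
+    # Illustrative sketch (not part of the library): executemany() batches a
+    # multi-row INSERT into as few statements as max_stmt_length allows; the
+    # table and columns below are hypothetical:
+    #
+    #   cur.executemany(
+    #       "INSERT INTO sensor_data (device_id, value) VALUES (%s, %s)",
+    #       [(1, 0.5), (2, 0.7), (3, 0.9)],
+    #   )
+    #   conn.commit()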
+
+    def _do_execute_many(
+        self, prefix, values, postfix, args, max_stmt_length, encoding
+    ):
+        conn = self._get_db()
+        escape = self._escape_args
+        if isinstance(prefix, str):
+            prefix = prefix.encode(encoding)
+        if isinstance(postfix, str):
+            postfix = postfix.encode(encoding)
+        sql = bytearray(prefix)
+        args = iter(args)
+        v = values % escape(next(args), conn)
+        if isinstance(v, str):
+            v = v.encode(encoding, "surrogateescape")
+        sql += v
+        rows = 0
+        for arg in args:
+            v = values % escape(arg, conn)
+            if isinstance(v, str):
+                v = v.encode(encoding, "surrogateescape")
+            if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
+                rows += self.execute(sql + postfix)
+                sql = bytearray(prefix)
+            else:
+                sql += b","
+            sql += v
+        rows += self.execute(sql + postfix)
+        self.rowcount = rows
+        return rows
+
+    def callproc(self, procname, args=()):
+        """Execute stored procedure procname with args
+
+        procname -- string, name of procedure to execute on server
+
+        args -- Sequence of parameters to use with procedure
+
+        Returns the original args.
+
+        Compatibility warning: PEP-249 specifies that any modified
+        parameters must be returned. This is currently impossible
+        as they are only available by storing them in server
+        variables and then retrieving them with a query. Since stored
+        procedures return zero or more result sets, there is no
+        reliable way to get at OUT or INOUT parameters via callproc.
+        The server variables are named @_procname_n, where procname
+        is the parameter above and n is the position of the parameter
+        (from zero). Once all result sets generated by the procedure
+        have been fetched, you can issue a SELECT @_procname_0, ...
+        query using .execute() to get any OUT or INOUT values.
+
+        Compatibility warning: The act of calling a stored procedure
+        itself creates an empty result set. This appears after any
+        result sets generated by the procedure. This is non-standard
+        behavior with respect to the DB-API. Be sure to use nextset()
+        to advance through all result sets; otherwise you may get
+        disconnected.
+        """
+        conn = self._get_db()
+        if args:
+            fmt = f"@_{procname}_%d=%s"
+            self._query(
+                "SET %s"
+                % ",".join(
+                    fmt % (index, conn.escape(arg)) for index, arg in enumerate(args)
+                )
+            )
+            self.nextset()
+
+        q = "CALL %s(%s)" % (
+            procname,
+            ",".join(["@_%s_%d" % (procname, i) for i in range(len(args))]),
+        )
+        self._query(q)
+        self._executed = q
+        return args
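+
+    # Illustrative sketch (not part of the library): reading an OUT/INOUT value
+    # after callproc(), for a hypothetical procedure `get_total` with two
+    # parameters (the second being the OUT parameter):
+    #
+    #   cur.callproc("get_total", (2023, 0))   # sets @_get_total_0, @_get_total_1
+    #   while cur.nextset():                   # drain the procedure's result sets
+    #       pass
+    #   cur.execute("SELECT @_get_total_1")
+    #   (total,) = cur.fetchone()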
+
+    def fetchone(self):
+        """Fetch the next row"""
+        self._check_executed()
+        if self._rows is None or self.rownumber >= len(self._rows):
+            return None
+        result = self._rows[self.rownumber]
+        self.rownumber += 1
+        return result
+
+    def fetchmany(self, size=None):
+        """Fetch several rows"""
+        self._check_executed()
+        if self._rows is None:
+            return ()
+        end = self.rownumber + (size or self.arraysize)
+        result = self._rows[self.rownumber : end]
+        self.rownumber = min(end, len(self._rows))
+        return result
+
+    def fetchall(self):
+        """Fetch all the rows"""
+        self._check_executed()
+        if self._rows is None:
+            return ()
+        if self.rownumber:
+            result = self._rows[self.rownumber :]
+        else:
+            result = self._rows
+        self.rownumber = len(self._rows)
+        return result
+
+    def scroll(self, value, mode="relative"):
+        self._check_executed()
+        if mode == "relative":
+            r = self.rownumber + value
+        elif mode == "absolute":
+            r = value
+        else:
+            raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+        if not (0 <= r < len(self._rows)):
+            raise IndexError("out of range")
+        self.rownumber = r
+
+    def _query(self, q):
+        conn = self._get_db()
+        self._last_executed = q
+        self._clear_result()
+        conn.query(q)
+        self._do_get_result()
+        return self.rowcount
+
+    def _clear_result(self):
+        self.rownumber = 0
+        self._result = None
+
+        self.rowcount = 0
+        self.description = None
+        self.lastrowid = None
+        self._rows = None
+
+    def _do_get_result(self):
+        conn = self._get_db()
+
+        self._result = result = conn._result
+
+        self.rowcount = result.affected_rows
+        self.description = result.description
+        self.lastrowid = result.insert_id
+        self._rows = result.rows
+
+    def __iter__(self):
+        return iter(self.fetchone, None)
+
+    Warning = err.Warning
+    Error = err.Error
+    InterfaceError = err.InterfaceError
+    DatabaseError = err.DatabaseError
+    DataError = err.DataError
+    OperationalError = err.OperationalError
+    IntegrityError = err.IntegrityError
+    InternalError = err.InternalError
+    ProgrammingError = err.ProgrammingError
+    NotSupportedError = err.NotSupportedError
+
+
+class DictCursorMixin:
+    # You can override this to use OrderedDict or other dict-like types.
+    dict_type = dict
+
+    def _do_get_result(self):
+        super(DictCursorMixin, self)._do_get_result()
+        fields = []
+        if self.description:
+            for f in self._result.fields:
+                name = f.name
+                if name in fields:
+                    name = f.table_name + "." + name
+                fields.append(name)
+            self._fields = fields
+
+        if fields and self._rows:
+            self._rows = [self._conv_row(r) for r in self._rows]
+
+    def _conv_row(self, row):
+        if row is None:
+            return None
+        return self.dict_type(zip(self._fields, row))
+
+
+class DictCursor(DictCursorMixin, Cursor):
+    """A cursor which returns results as a dictionary"""
+
+
+class SSCursor(Cursor):
+    """
+    Unbuffered Cursor, mainly useful for queries that return a lot of data,
+    or for connections to remote servers over a slow network.
+
+    Instead of copying every row of data into a buffer, this will fetch
+    rows as needed. The upside of this is the client uses much less memory,
+    and rows are returned much faster when traveling over a slow network
+    or if the result set is very big.
+
+    There are limitations, though. The MySQL protocol doesn't support
+    returning the total number of rows, so the only way to tell how many rows
+    there are is to iterate over every row returned. Also, it currently isn't
+    possible to scroll backwards, as only the current row is held in memory.
+    """
+
+    def _conv_row(self, row):
+        return row
+
+    def close(self):
+        conn = self.connection
+        if conn is None:
+            return
+
+        if self._result is not None and self._result is conn._result:
+            self._result._finish_unbuffered_query()
+
+        try:
+            while self.nextset():
+                pass
+        finally:
+            self.connection = None
+
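+    # Drain any unbuffered result even when the cursor is only garbage-collected
+    # and never closed explicitly.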
+    __del__ = close
+
+    def _query(self, q):
+        conn = self._get_db()
+        self._last_executed = q
+        self._clear_result()
+        conn.query(q, unbuffered=True)
+        self._do_get_result()
+        return self.rowcount
+
+    def nextset(self):
+        return self._nextset(unbuffered=True)
+
+    def read_next(self):
+        """Read next row"""
+        return self._conv_row(self._result._read_rowdata_packet_unbuffered())
+
+    def fetchone(self):
+        """Fetch next row"""
+        self._check_executed()
+        row = self.read_next()
+        if row is None:
+            return None
+        self.rownumber += 1
+        return row
+
+    def fetchall(self):
+        """
+        Fetch all rows, as MySQLdb does. This is of limited use for large
+        queries, since the entire result is buffered. See fetchall_unbuffered()
+        for an unbuffered generator version of this method.
+        """
+        return list(self.fetchall_unbuffered())
+
+    def fetchall_unbuffered(self):
+        """
+        Fetch all rows, implemented as a generator. This is not standard
+        DB-API behavior, but returning everything in a list would use an
+        unreasonable amount of memory for large result sets.
+        """
+        return iter(self.fetchone, None)
+
+    def __iter__(self):
+        return self.fetchall_unbuffered()
+
+    def fetchmany(self, size=None):
+        """Fetch many"""
+        self._check_executed()
+        if size is None:
+            size = self.arraysize
+
+        rows = []
+        for i in range(size):
+            row = self.read_next()
+            if row is None:
+                break
+            rows.append(row)
+            self.rownumber += 1
+        return rows
+
+    def scroll(self, value, mode="relative"):
+        self._check_executed()
+
+        if mode == "relative":
+            if value < 0:
+                raise err.NotSupportedError(
+                    "Backwards scrolling not supported by this cursor"
+                )
+
+            for _ in range(value):
+                self.read_next()
+            self.rownumber += value
+        elif mode == "absolute":
+            if value < self.rownumber:
+                raise err.NotSupportedError(
+                    "Backwards scrolling not supported by this cursor"
+                )
+
+            end = value - self.rownumber
+            for _ in range(end):
+                self.read_next()
+            self.rownumber = value
+        else:
+            raise err.ProgrammingError("unknown scroll mode %s" % mode)
+
+
+class SSDictCursor(DictCursorMixin, SSCursor):
+    """An unbuffered cursor, which returns results as a dictionary"""

+ 143 - 0
ambt-preden-monthlymigration-dev/package/pymysql/err.py

@@ -0,0 +1,143 @@
+import struct
+
+from .constants import ER
+
+
+class MySQLError(Exception):
+    """Exception related to operation with MySQL."""
+
+
+class Warning(Warning, MySQLError):
+    """Exception raised for important warnings like data truncations
+    while inserting, etc."""
+
+
+class Error(MySQLError):
+    """Exception that is the base class of all other error exceptions
+    (not Warning)."""
+
+
+class InterfaceError(Error):
+    """Exception raised for errors that are related to the database
+    interface rather than the database itself."""
+
+
+class DatabaseError(Error):
+    """Exception raised for errors that are related to the
+    database."""
+
+
+class DataError(DatabaseError):
+    """Exception raised for errors that are due to problems with the
+    processed data like division by zero, numeric value out of range,
+    etc."""
+
+
+class OperationalError(DatabaseError):
+    """Exception raised for errors that are related to the database's
+    operation and not necessarily under the control of the programmer,
+    e.g. an unexpected disconnect occurs, the data source name is not
+    found, a transaction could not be processed, a memory allocation
+    error occurred during processing, etc."""
+
+
+class IntegrityError(DatabaseError):
+    """Exception raised when the relational integrity of the database
+    is affected, e.g. a foreign key check fails, duplicate key,
+    etc."""
+
+
+class InternalError(DatabaseError):
+    """Exception raised when the database encounters an internal
+    error, e.g. the cursor is not valid anymore, the transaction is
+    out of sync, etc."""
+
+
+class ProgrammingError(DatabaseError):
+    """Exception raised for programming errors, e.g. table not found
+    or already exists, syntax error in the SQL statement, wrong number
+    of parameters specified, etc."""
+
+
+class NotSupportedError(DatabaseError):
+    """Exception raised in case a method or database API was used
+    which is not supported by the database, e.g. requesting a
+    .rollback() on a connection that does not support transactions or
+    has transactions turned off."""
+
+
+error_map = {}
+
+
+def _map_error(exc, *errors):
+    for error in errors:
+        error_map[error] = exc
+
+
+_map_error(
+    ProgrammingError,
+    ER.DB_CREATE_EXISTS,
+    ER.SYNTAX_ERROR,
+    ER.PARSE_ERROR,
+    ER.NO_SUCH_TABLE,
+    ER.WRONG_DB_NAME,
+    ER.WRONG_TABLE_NAME,
+    ER.FIELD_SPECIFIED_TWICE,
+    ER.INVALID_GROUP_FUNC_USE,
+    ER.UNSUPPORTED_EXTENSION,
+    ER.TABLE_MUST_HAVE_COLUMNS,
+    ER.CANT_DO_THIS_DURING_AN_TRANSACTION,
+    ER.WRONG_DB_NAME,
+    ER.WRONG_COLUMN_NAME,
+)
+_map_error(
+    DataError,
+    ER.WARN_DATA_TRUNCATED,
+    ER.WARN_NULL_TO_NOTNULL,
+    ER.WARN_DATA_OUT_OF_RANGE,
+    ER.NO_DEFAULT,
+    ER.PRIMARY_CANT_HAVE_NULL,
+    ER.DATA_TOO_LONG,
+    ER.DATETIME_FUNCTION_OVERFLOW,
+    ER.TRUNCATED_WRONG_VALUE_FOR_FIELD,
+    ER.ILLEGAL_VALUE_FOR_TYPE,
+)
+_map_error(
+    IntegrityError,
+    ER.DUP_ENTRY,
+    ER.NO_REFERENCED_ROW,
+    ER.NO_REFERENCED_ROW_2,
+    ER.ROW_IS_REFERENCED,
+    ER.ROW_IS_REFERENCED_2,
+    ER.CANNOT_ADD_FOREIGN,
+    ER.BAD_NULL_ERROR,
+)
+_map_error(
+    NotSupportedError,
+    ER.WARNING_NOT_COMPLETE_ROLLBACK,
+    ER.NOT_SUPPORTED_YET,
+    ER.FEATURE_DISABLED,
+    ER.UNKNOWN_STORAGE_ENGINE,
+)
+_map_error(
+    OperationalError,
+    ER.DBACCESS_DENIED_ERROR,
+    ER.ACCESS_DENIED_ERROR,
+    ER.CON_COUNT_ERROR,
+    ER.TABLEACCESS_DENIED_ERROR,
+    ER.COLUMNACCESS_DENIED_ERROR,
+    ER.CONSTRAINT_FAILED,
+    ER.LOCK_DEADLOCK,
+)
+
+
+del _map_error, ER
+
+
+def raise_mysql_exception(data):
+    errno = struct.unpack("<h", data[1:3])[0]
+    errval = data[9:].decode("utf-8", "replace")
+    errorclass = error_map.get(errno)
+    if errorclass is None:
+        errorclass = InternalError if errno < 1000 else OperationalError
+    raise errorclass(errno, errval)
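+
+# Illustrative sketch (not part of the library): an error packet carrying
+# errno 1062 (ER.DUP_ENTRY) is mapped to IntegrityError above, so callers can:
+#
+#   try:
+#       cur.execute("INSERT INTO t (id) VALUES (1)")   # hypothetical duplicate key
+#   except pymysql.err.IntegrityError as exc:
+#       errno, message = exc.args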

+ 18 - 0
ambt-preden-monthlymigration-dev/package/pymysql/optionfile.py

@@ -0,0 +1,18 @@
+import configparser
+
+
+class Parser(configparser.RawConfigParser):
+    def __init__(self, **kwargs):
+        kwargs["allow_no_value"] = True
+        configparser.RawConfigParser.__init__(self, **kwargs)
+
+    def __remove_quotes(self, value):
+        quotes = ["'", '"']
+        for quote in quotes:
+            if len(value) >= 2 and value[0] == value[-1] == quote:
+                return value[1:-1]
+        return value
+
+    def get(self, section, option):
+        value = configparser.RawConfigParser.get(self, section, option)
+        return self.__remove_quotes(value)
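+
+# Illustrative sketch (not part of the library): Parser strips a matching pair
+# of outer quotes from option values, so an option file line such as
+#
+#   password = "s3cret"
+#
+# would be returned by get("client", "password") as 's3cret'.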

+ 358 - 0
ambt-preden-monthlymigration-dev/package/pymysql/protocol.py

@@ -0,0 +1,358 @@
+# Python implementation of low level MySQL client-server protocol
+# http://dev.mysql.com/doc/internals/en/client-server-protocol.html
+
+from .charset import MBLENGTH
+from .constants import FIELD_TYPE, SERVER_STATUS
+from . import err
+
+import struct
+import sys
+
+
+DEBUG = False
+
+NULL_COLUMN = 251
+UNSIGNED_CHAR_COLUMN = 251
+UNSIGNED_SHORT_COLUMN = 252
+UNSIGNED_INT24_COLUMN = 253
+UNSIGNED_INT64_COLUMN = 254
+
+
+def dump_packet(data):  # pragma: no cover
+    def printable(data):
+        if 32 <= data < 127:
+            return chr(data)
+        return "."
+
+    try:
+        print("packet length:", len(data))
+        for i in range(1, 7):
+            f = sys._getframe(i)
+            print("call[%d]: %s (line %d)" % (i, f.f_code.co_name, f.f_lineno))
+        print("-" * 66)
+    except ValueError:
+        pass
+    dump_data = [data[i : i + 16] for i in range(0, min(len(data), 256), 16)]
+    for d in dump_data:
+        print(
+            " ".join("{:02X}".format(x) for x in d)
+            + "   " * (16 - len(d))
+            + " " * 2
+            + "".join(printable(x) for x in d)
+        )
+    print("-" * 66)
+    print()
+
+
+class MysqlPacket:
+    """Representation of a MySQL response packet.
+
+    Provides an interface for reading/parsing the packet results.
+    """
+
+    __slots__ = ("_position", "_data")
+
+    def __init__(self, data, encoding):
+        self._position = 0
+        self._data = data
+
+    def get_all_data(self):
+        return self._data
+
+    def read(self, size):
+        """Read the first 'size' bytes in packet and advance cursor past them."""
+        result = self._data[self._position : (self._position + size)]
+        if len(result) != size:
+            error = (
+                "Result length not requested length:\n"
+                "Expected=%s.  Actual=%s.  Position: %s.  Data Length: %s"
+                % (size, len(result), self._position, len(self._data))
+            )
+            if DEBUG:
+                print(error)
+                self.dump()
+            raise AssertionError(error)
+        self._position += size
+        return result
+
+    def read_all(self):
+        """Read all remaining data in the packet.
+
+        (Subsequent read() calls will raise errors.)
+        """
+        result = self._data[self._position :]
+        self._position = None  # ensure no subsequent read()
+        return result
+
+    def advance(self, length):
+        """Advance the cursor in data buffer 'length' bytes."""
+        new_position = self._position + length
+        if new_position < 0 or new_position > len(self._data):
+            raise Exception(
+                "Invalid advance amount (%s) for cursor.  "
+                "Position=%s" % (length, new_position)
+            )
+        self._position = new_position
+
+    def rewind(self, position=0):
+        """Set the position of the data buffer cursor to 'position'."""
+        if position < 0 or position > len(self._data):
+            raise Exception("Invalid position to rewind cursor to: %s." % position)
+        self._position = position
+
+    def get_bytes(self, position, length=1):
+        """Get 'length' bytes starting at 'position'.
+
+        Position is start of payload (first four packet header bytes are not
+        included) starting at index '0'.
+
+        No error checking is done.  If requesting outside end of buffer
+        an empty string (or string shorter than 'length') may be returned!
+        """
+        return self._data[position : (position + length)]
+
+    def read_uint8(self):
+        result = self._data[self._position]
+        self._position += 1
+        return result
+
+    def read_uint16(self):
+        result = struct.unpack_from("<H", self._data, self._position)[0]
+        self._position += 2
+        return result
+
+    def read_uint24(self):
+        low, high = struct.unpack_from("<HB", self._data, self._position)
+        self._position += 3
+        return low + (high << 16)
+
+    def read_uint32(self):
+        result = struct.unpack_from("<I", self._data, self._position)[0]
+        self._position += 4
+        return result
+
+    def read_uint64(self):
+        result = struct.unpack_from("<Q", self._data, self._position)[0]
+        self._position += 8
+        return result
+
+    def read_string(self):
+        end_pos = self._data.find(b"\0", self._position)
+        if end_pos < 0:
+            return None
+        result = self._data[self._position : end_pos]
+        self._position = end_pos + 1
+        return result
+
+    def read_length_encoded_integer(self):
+        """Read a 'Length Coded Binary' number from the data buffer.
+
+        Length coded numbers can be anywhere from 1 to 9 bytes depending
+        on the value of the first byte.
+        """
+        c = self.read_uint8()
+        if c == NULL_COLUMN:
+            return None
+        if c < UNSIGNED_CHAR_COLUMN:
+            return c
+        elif c == UNSIGNED_SHORT_COLUMN:
+            return self.read_uint16()
+        elif c == UNSIGNED_INT24_COLUMN:
+            return self.read_uint24()
+        elif c == UNSIGNED_INT64_COLUMN:
+            return self.read_uint64()
+
+    def read_length_coded_string(self):
+        """Read a 'Length Coded String' from the data buffer.
+
+        A 'Length Coded String' consists first of a length coded
+        (unsigned, positive) integer represented in 1-9 bytes followed by
+        that many bytes of binary data.  (For example "cat" would be "3cat".)
+        """
+        length = self.read_length_encoded_integer()
+        if length is None:
+            return None
+        return self.read(length)
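+
+    # Illustrative sketch (not part of the library) of the length encoding the
+    # two methods above rely on:
+    #
+    #   0x00..0xfa -> the value itself (single byte)
+    #   0xfb       -> NULL column
+    #   0xfc       -> value in the next 2 bytes, e.g. b"\xfc\x2c\x01" -> 300
+    #   0xfd       -> value in the next 3 bytes
+    #   0xfe       -> value in the next 8 bytes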
+
+    def read_struct(self, fmt):
+        s = struct.Struct(fmt)
+        result = s.unpack_from(self._data, self._position)
+        self._position += s.size
+        return result
+
+    def is_ok_packet(self):
+        # https://dev.mysql.com/doc/internals/en/packet-OK_Packet.html
+        return self._data[0] == 0 and len(self._data) >= 7
+
+    def is_eof_packet(self):
+        # http://dev.mysql.com/doc/internals/en/generic-response-packets.html#packet-EOF_Packet
+        # Caution: \xFE may be LengthEncodedInteger.
+        # If \xFE is LengthEncodedInteger header, 8bytes followed.
+        return self._data[0] == 0xFE and len(self._data) < 9
+
+    def is_auth_switch_request(self):
+        # http://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
+        return self._data[0] == 0xFE
+
+    def is_extra_auth_data(self):
+        # https://dev.mysql.com/doc/internals/en/successful-authentication.html
+        return self._data[0] == 1
+
+    def is_resultset_packet(self):
+        field_count = self._data[0]
+        return 1 <= field_count <= 250
+
+    def is_load_local_packet(self):
+        return self._data[0] == 0xFB
+
+    def is_error_packet(self):
+        return self._data[0] == 0xFF
+
+    def check_error(self):
+        if self.is_error_packet():
+            self.raise_for_error()
+
+    def raise_for_error(self):
+        self.rewind()
+        self.advance(1)  # field_count == error (we already know that)
+        errno = self.read_uint16()
+        if DEBUG:
+            print("errno =", errno)
+        err.raise_mysql_exception(self._data)
+
+    def dump(self):
+        dump_packet(self._data)
+
+
+class FieldDescriptorPacket(MysqlPacket):
+    """A MysqlPacket that represents a specific column's metadata in the result.
+
+    Parsing is automatically done and the results are exported via public
+    attributes on the class such as: db, table_name, name, length, type_code.
+    """
+
+    def __init__(self, data, encoding):
+        MysqlPacket.__init__(self, data, encoding)
+        self._parse_field_descriptor(encoding)
+
+    def _parse_field_descriptor(self, encoding):
+        """Parse the 'Field Descriptor' (Metadata) packet.
+
+        This is compatible with MySQL 4.1+ (not compatible with MySQL 4.0).
+        """
+        self.catalog = self.read_length_coded_string()
+        self.db = self.read_length_coded_string()
+        self.table_name = self.read_length_coded_string().decode(encoding)
+        self.org_table = self.read_length_coded_string().decode(encoding)
+        self.name = self.read_length_coded_string().decode(encoding)
+        self.org_name = self.read_length_coded_string().decode(encoding)
+        (
+            self.charsetnr,
+            self.length,
+            self.type_code,
+            self.flags,
+            self.scale,
+        ) = self.read_struct("<xHIBHBxx")
+        # 'default' is a length coded binary and is still in the buffer?
+        # not used for normal result sets...
+
+    def description(self):
+        """Provides a 7-item tuple compatible with the Python PEP249 DB Spec."""
+        return (
+            self.name,
+            self.type_code,
+            None,  # TODO: display_length; should this be self.length?
+            self.get_column_length(),  # 'internal_size'
+            self.get_column_length(),  # 'precision'  # TODO: why!?!?
+            self.scale,
+            self.flags % 2 == 0,
+        )
+
+    def get_column_length(self):
+        if self.type_code == FIELD_TYPE.VAR_STRING:
+            mblen = MBLENGTH.get(self.charsetnr, 1)
+            return self.length // mblen
+        return self.length
+
+    def __str__(self):
+        return "%s %r.%r.%r, type=%s, flags=%x" % (
+            self.__class__,
+            self.db,
+            self.table_name,
+            self.name,
+            self.type_code,
+            self.flags,
+        )
+
+
+class OKPacketWrapper:
+    """
+    OK Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_ok_packet():
+            raise ValueError(
+                "Cannot create "
+                + str(self.__class__.__name__)
+                + " object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.packet.advance(1)
+
+        self.affected_rows = self.packet.read_length_encoded_integer()
+        self.insert_id = self.packet.read_length_encoded_integer()
+        self.server_status, self.warning_count = self.read_struct("<HH")
+        self.message = self.packet.read_all()
+        self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)
+
+
+class EOFPacketWrapper:
+    """
+    EOF Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_eof_packet():
+            raise ValueError(
+                f"Cannot create '{self.__class__}' object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.warning_count, self.server_status = self.packet.read_struct("<xhh")
+        if DEBUG:
+            print("server_status=", self.server_status)
+        self.has_next = self.server_status & SERVER_STATUS.SERVER_MORE_RESULTS_EXISTS
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)
+
+
+class LoadLocalPacketWrapper:
+    """
+    Load Local Packet Wrapper. It uses an existing packet object, and wraps
+    around it, exposing useful variables while still providing access
+    to the original packet objects variables and methods.
+    """
+
+    def __init__(self, from_packet):
+        if not from_packet.is_load_local_packet():
+            raise ValueError(
+                f"Cannot create '{self.__class__}' object from invalid packet type"
+            )
+
+        self.packet = from_packet
+        self.filename = self.packet.get_all_data()[1:]
+        if DEBUG:
+            print("filename=", self.filename)
+
+    def __getattr__(self, key):
+        return getattr(self.packet, key)

+ 20 - 0
ambt-preden-monthlymigration-dev/package/pymysql/times.py

@@ -0,0 +1,20 @@
+from time import localtime
+from datetime import date, datetime, time, timedelta
+
+
+Date = date
+Time = time
+TimeDelta = timedelta
+Timestamp = datetime
+
+
+def DateFromTicks(ticks):
+    return date(*localtime(ticks)[:3])
+
+
+def TimeFromTicks(ticks):
+    return time(*localtime(ticks)[3:6])
+
+
+def TimestampFromTicks(ticks):
+    return datetime(*localtime(ticks)[:6])