Codebase list aiopg / ed79eb9
Update upstream source from tag 'upstream/0.16.0' Update to upstream version '0.16.0' with Debian dir b12047e2ecad0de1e638923439b91f10cd4e375e Piotr Ożarowski 4 years ago
11 changed file(s) with 133 addition(s) and 33 deletion(s). Raw diff Collapse all Expand all
00 CHANGES
11 -------
22
3 0.16.0 (2019-01-25)
4 ^^^^^^^^^^^^^^^^^^^
5
6 * Fix select priority name (#525)
7
8 * Rename `psycopg2` to `psycopg2-binary` to fix deprecation warning (#507)
9
10 * Fix #189 hstore when using ReadDictCursor (#512)
11
12 * close cannot be used while an asynchronous query is underway (#452)
13
14 * sqlalchemy adapter trx begin allow transaction_mode (#498)
15
16
317 0.15.0 (2018-08-14)
418 ^^^^^^^^^^^^^^^^^^^
519
00 Metadata-Version: 2.1
11 Name: aiopg
2 Version: 0.15.0
2 Version: 0.16.0
33 Summary: Postgres integration with asyncio.
44 Home-page: https://aiopg.readthedocs.io
55 Author: Andrew Svetlov
9696
9797 CHANGES
9898 -------
99
100 0.16.0 (2019-01-25)
101 ^^^^^^^^^^^^^^^^^^^
102
103 * Fix select priority name (#525)
104
105 * Rename `psycopg2` to `psycopg2-binary` to fix deprecation warning (#507)
106
107 * Fix #189 hstore when using ReadDictCursor (#512)
108
109 * close cannot be used while an asynchronous query is underway (#452)
110
111 * sqlalchemy adapter trx begin allow transaction_mode (#498)
112
99113
100114 0.15.0 (2018-08-14)
101115 ^^^^^^^^^^^^^^^^^^^
1010 'version', 'version_info', 'DEFAULT_TIMEOUT', 'IsolationLevel',
1111 'Transaction')
1212
13 __version__ = '0.15.0'
13 __version__ = '0.16.0'
1414
1515 version = __version__ + ' , Python ' + sys.version
1616
3838 """)
3939 rv0, rv1 = [], []
4040 for oids in (yield from cur.fetchall()):
41 rv0.append(oids[0])
42 rv1.append(oids[1])
41 if isinstance(oids, dict):
42 rv0.append(oids['oid'])
43 rv1.append(oids['typarray'])
44 else:
45 rv0.append(oids[0])
46 rv1.append(oids[1])
4347
4448 cur.close()
4549 return tuple(rv0), tuple(rv1)
116120 self._cancelling = False
117121 self._cancellation_waiter = None
118122 self._echo = echo
123 self._conn_cursor = None
119124 self._notifies = asyncio.Queue(loop=loop)
120125 self._weakref = weakref.ref(self)
121126 self._loop.add_reader(self._fileno, self._ready, self._weakref)
127
122128 if loop.get_debug():
123129 self._source_traceback = traceback.extract_stack(sys._getframe(1))
124130
263269 *name*, *scrollable* and *withhold* parameters are not supported by
264270 psycopg in asynchronous mode.
265271
272 NOTE: as of [TODO] any previously created cursor from this
273 connection will be closed
266274 """
275 self.close_cursor()
276
267277 self._last_usage = self._loop.time()
268278 coro = self._cursor(name=name, cursor_factory=cursor_factory,
269279 scrollable=scrollable, withhold=withhold,
270280 timeout=timeout)
271281 return _ContextManager(coro)
272282
283 def cursor_created(self, cursor):
284 if self._conn_cursor and not self._conn_cursor.closed:
285 raise Exception("You can only have one cursor per connection")
286
287 self._conn_cursor = cursor
288
289 def cursor_closed(self, cursor):
290 if cursor != self._conn_cursor:
291 raise Exception("You can only have one cursor per connection")
292
293 self._conn_cursor = None
294
273295 @asyncio.coroutine
274296 def _cursor(self, name=None, cursor_factory=None,
275297 scrollable=None, withhold=False, timeout=None):
280302 cursor_factory=cursor_factory,
281303 scrollable=scrollable,
282304 withhold=withhold)
283 return Cursor(self, impl, timeout, self._echo)
305 cursor = Cursor(self, impl, timeout, self._echo)
306 return cursor
284307
285308 @asyncio.coroutine
286309 def _cursor_impl(self, name=None, cursor_factory=None,
302325 if self._writing:
303326 self._writing = False
304327 self._loop.remove_writer(self._fileno)
328
329 self.close_cursor()
305330 self._conn.close()
331
306332 if self._waiter is not None and not self._waiter.done():
307333 self._waiter.set_exception(
308334 psycopg2.OperationalError("Connection closed"))
312338 ret = create_future(self._loop)
313339 ret.set_result(None)
314340 return ret
341
342 def close_cursor(self):
343 if self._conn_cursor:
344 self._conn_cursor.close()
315345
316346 @property
317347 def closed(self):
1414 self._timeout = timeout
1515 self._echo = echo
1616 self._transaction = Transaction(self, IsolationLevel.repeatable_read)
17
18 conn.cursor_created(self)
1719
1820 @property
1921 def echo(self):
4749
4850 def close(self):
4951 """Close the cursor now."""
50 self._impl.close()
52 if not self.closed:
53 self._impl.close()
54 self._conn.cursor_closed(self)
5155
5256 @property
5357 def closed(self):
256256 if self._closing:
257257 conn.close()
258258 else:
259 conn.close_cursor() # there may be weak-refs to these cursors
259260 self._free.append(conn)
260261 fut = ensure_future(self._wakeup(), loop=self._loop)
261262 return fut
133133 def connection(self):
134134 return self._connection
135135
136 def begin(self):
136 def begin(self, isolation_level=None, readonly=False, deferrable=False):
137137 """Begin a transaction and return a transaction handle.
138
139 isolation_level - The isolation level of the transaction,
140 should be one of 'SERIALIZABLE', 'REPEATABLE READ', 'READ COMMITTED',
141 'READ UNCOMMITTED', default (None) is 'READ COMMITTED'
142
143 readonly - The transaction is read only
144
145 deferrable - The transaction may block when acquiring data before
146 running without overhead of SERIALIZABLE, has no effect unless
147 transaction is both SERIALIZABLE and readonly
138148
139149 The returned object is an instance of Transaction. This
140150 object represents the "scope" of the transaction, which
160170 .begin_twophase - use a two phase/XA transaction
161171
162172 """
163 coro = self._begin()
173 coro = self._begin(isolation_level, readonly, deferrable)
164174 return _TransactionContextManager(coro)
165175
166176 @asyncio.coroutine
167 def _begin(self):
177 def _begin(self, isolation_level, readonly, deferrable):
168178 if self._transaction is None:
169179 self._transaction = RootTransaction(self)
170 yield from self._begin_impl()
180 yield from self._begin_impl(isolation_level, readonly, deferrable)
171181 return self._transaction
172182 else:
173183 return Transaction(self, self._transaction)
174184
175185 @asyncio.coroutine
176 def _begin_impl(self):
177 cur = yield from self._connection.cursor()
178 try:
179 yield from cur.execute('BEGIN')
186 def _begin_impl(self, isolation_level, readonly, deferrable):
187 stmt = 'BEGIN'
188 if isolation_level is not None:
189 stmt += ' ISOLATION LEVEL ' + isolation_level
190 if readonly:
191 stmt += ' READ ONLY'
192 if deferrable:
193 stmt += ' DEFERRABLE'
194
195 cur = yield from self._connection.cursor()
196 try:
197 yield from cur.execute(stmt)
180198 finally:
181199 cur.close()
182200
216234 def _begin_nested(self):
217235 if self._transaction is None:
218236 self._transaction = RootTransaction(self)
219 yield from self._begin_impl()
237 yield from self._begin_impl(None, False, False)
220238 else:
221239 self._transaction = NestedTransaction(self, self._transaction)
222240 self._transaction._savepoint = yield from self._savepoint_impl()
8888 def __init__(self, result_proxy, metadata):
8989 self._processors = processors = []
9090
91 result_map = {}
92 if result_proxy._result_map:
93 result_map = {elem[0]: elem[3] for elem in
94 result_proxy._result_map}
91 map_type, map_column_name = self.result_map(result_proxy._result_map)
9592
9693 # We do not strictly need to store the processor in the key mapping,
9794 # though it is faster in the Python version (probably because of the
123120 # colname = dialect.normalize_name(colname)
124121
125122 name, obj, type_ = (
126 colname,
123 map_column_name.get(colname, colname),
127124 None,
128 result_map.get(
129 colname,
130 typemap.get(coltype, sqltypes.NULLTYPE))
125 map_type.get(colname, typemap.get(coltype, sqltypes.NULLTYPE))
131126 )
132127
133128 processor = type_._cached_result_processor(dialect, coltype)
149144 # unambiguous.
150145 primary_keymap[name] = rec = (None, obj, None)
151146
152 self.keys.append(colname)
147 self.keys.append(name)
153148 if obj:
154149 for o in obj:
155150 keymap[o] = rec
161156 # overwrite keymap values with those of the
162157 # high precedence keymap.
163158 keymap.update(primary_keymap)
159
160 def result_map(self, data_map):
161 data_map = data_map or {}
162 map_type = {}
163 map_column_name = {}
164 for elem in data_map:
165 name = elem[0]
166 priority_name = getattr(elem[2][0], 'key', name)
167 map_type[name] = elem[3] # type column
168 map_column_name[name] = priority_name
169
170 return map_type, map_column_name
164171
165172 def _key_fallback(self, key, raiseerr=True):
166173 map = self._keymap
174181 elif isinstance(key, expression.ColumnElement):
175182 if (key._label and key._label in map):
176183 result = map[key._label]
177 elif (hasattr(key, 'name') and key.name in map):
184 elif (hasattr(key, 'key') and key.key in map):
178185 # match is only on name.
179 result = map[key.name]
186 result = map[key.key]
180187 # search extra hard to make sure this
181188 # isn't a column/label name overlap.
182189 # this check isn't currently available if the row
00 Metadata-Version: 2.1
11 Name: aiopg
2 Version: 0.15.0
2 Version: 0.16.0
33 Summary: Postgres integration with asyncio.
44 Home-page: https://aiopg.readthedocs.io
55 Author: Andrew Svetlov
9696
9797 CHANGES
9898 -------
99
100 0.16.0 (2019-01-25)
101 ^^^^^^^^^^^^^^^^^^^
102
103 * Fix select priority name (#525)
104
105 * Rename `psycopg2` to `psycopg2-binary` to fix deprecation warning (#507)
106
107 * Fix #189 hstore when using ReadDictCursor (#512)
108
109 * close cannot be used while an asynchronous query is underway (#452)
110
111 * sqlalchemy adapter trx begin allow transaction_mode (#498)
112
99113
100114 0.15.0 (2018-08-14)
101115 ^^^^^^^^^^^^^^^^^^^
0 psycopg2>=2.7.0
0 psycopg2-binary>=2.7.0
11
22 [sa]
3 sqlalchemy>=1.1
3 sqlalchemy[postgresql_psycopg2binary]>=1.1
33 from setuptools import setup
44
55
6 install_requires = ['psycopg2>=2.7.0']
6 install_requires = ['psycopg2-binary>=2.7.0']
7 extras_require = {'sa': ['sqlalchemy[postgresql_psycopg2binary]>=1.1']}
78
89 PY_VER = sys.version_info
910
1314
1415 def read(f):
1516 return open(os.path.join(os.path.dirname(__file__), f)).read().strip()
16
17
18 extras_require = {'sa': ['sqlalchemy>=1.1'], }
1917
2018
2119 def read_version():