Python — avoiding reading the whole blob into memory
I am trying to use the data of a blob, process the data, and send it via email. I am receiving the following error — I guess it relates to the size of the blob being read into memory, since it only happens with bigger blobs:
traceback (most recent call last): file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 1535, in __call__ rv = self.handle_exception(request, response, e) file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 1529, in __call__ rv = self.router.dispatch(request, response) file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 1278, in default_dispatcher return route.handler_adapter(request, response) file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 1102, in __call__ return handler.dispatch() file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 572, in dispatch return self.handle_exception(e, self.app.debug) file "/python27_runtime/python27_lib/versions/third_party/webapp2-2.5.2/webapp2.py", line 570, in dispatch return method(*args, **kwargs) file "/base/data/home/apps/xxxx/main.py", line 154, in post db.run_in_transaction(filtering) file "/python27_runtime/python27_lib/versions/1/google/appengine/api/datastore.py", line 2461, in runintransaction return runintransactionoptions(none, function, *args, **kwargs) file "/python27_runtime/python27_lib/versions/1/google/appengine/api/datastore.py", line 2599, in runintransactionoptions ok, result = _doonetry(new_connection, function, args, kwargs) file "/python27_runtime/python27_lib/versions/1/google/appengine/api/datastore.py", line 2621, in _doonetry result = function(*args, **kwargs) file "/base/data/home/apps/xxxx/main.py", line 128, in filtering k in liwkws: file "/python27_runtime/python27_lib/versions/1/google/appengine/ext/db/__init__.py", line 2326, in next return self.__model_class.from_entity(self.__iterator.next()) file "/python27_runtime/python27_lib/versions/1/google/appengine/datastore/datastore_query.py", line 2892, in next next_batch = self.__batcher.next() file 
"/python27_runtime/python27_lib/versions/1/google/appengine/datastore/datastore_query.py", line 2754, in next return self.next_batch(self.at_least_one) file "/python27_runtime/python27_lib/versions/1/google/appengine/datastore/datastore_query.py", line 2791, in next_batch batch = self.__next_batch.get_result() file "/python27_runtime/python27_lib/versions/1/google/appengine/api/apiproxy_stub_map.py", line 604, in get_result return self.__get_result_hook(self) file "/python27_runtime/python27_lib/versions/1/google/appengine/datastore/datastore_query.py", line 2528, in __query_result_hook self._batch_shared.conn.check_rpc_success(rpc) file "/python27_runtime/python27_lib/versions/1/google/appengine/datastore/datastore_rpc.py", line 1224, in check_rpc_success raise _todatastoreerror(err) badrequesterror: invalid handle: 16023202376614806719
This is the code:
# Read in the variables passed along from the former process (HTTP request params).
filter_name = self.request.get('filter_name')
user = self.request.get('user')
lowkey = self.request.get('lowkey')

def filtering():
    """Filter the blob's lines against the stored keyword list and build CSV output.

    Reads the whole blob identified by ``lowkey`` into memory, drops every
    line that contains any keyword from the ``filterlist`` ancestor query,
    and writes the surviving lines into an in-memory CSV buffer.

    NOTE(review): this runs inside db.run_in_transaction (see traceback);
    the BlobReader and the datastore query iterator both live across the
    transaction boundary, which is the likely source of the
    "invalid handle" BadRequestError on large blobs — confirm by moving
    the blob read outside the transaction.
    """
    # Read the entire blob into memory in one call.
    low = blobstore.BlobReader(lowkey).read()

    # Keyword entities stored under the ancestor key for this filter name.
    liwkws = db.GqlQuery("SELECT * FROM filterlist WHERE ANCESTOR IS :1",
                         filter_key(filter_name))

    # Prepare the data for processing: decode, normalise newlines, split
    # into individual lines.
    low = unicode(low, 'utf8').encode('utf-8').replace('\r', '').split('\n')

    # Materialise the keyword strings once, instead of re-running the
    # datastore query iterator for every input line (was O(lines * query)).
    keywords = [k.newkey.encode('utf-8').lower() for k in liwkws]

    # Keep only the lines that contain none of the keywords.  This replaces
    # the original mark-as-'delete' pass followed by repeated list.remove()
    # calls, which was O(n^2) and also clobbered any genuine 'delete' lines.
    kept = [line for line in low
            if not any(kw in line.lower() for kw in keywords)]

    # Write the surviving lines into an in-memory CSV buffer.
    output_buffer = StringIO.StringIO()
    csv_output = csv.writer(output_buffer, delimiter=",")
    for line in kept:
        csv_output.writerow([line])
    result = output_buffer.getvalue()
Any ideas? The size of the blob in this case is not large (3 MB), and if I don't read it in directly I would have to save a list of the same size anyway.