Here are examples of the Python API `sqlalchemy.orm.make_transient`, taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.
9 Examples
3
View Complete Implementation : file_behavior.py
Copyright Apache License 2.0
Author : airbnb
def copy_file(self, file_id):
    """Copy the file identified by *file_id* into the current parent folder.

    The loaded file object is detached from the session and stripped of its
    ORM identity so that re-adding it performs an INSERT of a new row.
    Returns the copied object serialized with ``json.dumps``.
    """
    file_object = get_file_by_id(self._db_session, file_id)
    # Presumably raises if the file is locked, aborting the copy — confirm.
    self._check_file_lock(file_object)
    parent_folder = self._get_parent()
    # Detach from the session, then remove persistent identity so the
    # subsequent add() emits an INSERT rather than an UPDATE.
    self._db_session.expunge(file_object)
    make_transient(file_object)
    file_object.id = None
    file_object.file_id = None
    file_object.parent_id = parent_folder.id
    self._db_session.add(file_object)
    # NOTE(review): first commit presumably populates the new ids (e.g. via
    # column defaults) before file_id is read below — confirm against model.
    self._db_session.commit()
    self._db_session.add(EventModel(event_type='ITEM_COPY', source_id=file_object.file_id, source_type='file'))
    self._db_session.commit()
    # NOTE(review): json.dumps on a model instance assumes a custom encoder
    # or serializable model — verify upstream.
    return json.dumps(file_object)
3
View Complete Implementation : folder_behavior.py
Copyright Apache License 2.0
Author : airbnb
def copy_folder(self, folder_id):
    """Copy the folder identified by *folder_id* into the current parent folder.

    The loaded folder is detached from the session and stripped of its ORM
    identity so that re-adding it performs an INSERT of a new row.
    Returns the copied object serialized with ``json.dumps``.
    """
    folder = get_folder_by_id(self._db_session, folder_id)
    parent_folder = self._get_parent()
    # Detach from the session, then remove persistent identity so the
    # subsequent add() emits an INSERT rather than an UPDATE.
    self._db_session.expunge(folder)
    make_transient(folder)
    folder.id = None
    folder.folder_id = None
    folder.parent_id = parent_folder.id
    self._db_session.add(folder)
    # NOTE(review): first commit presumably populates the new ids before
    # folder_id is read below — confirm against model defaults.
    self._db_session.commit()
    self._db_session.add(EventModel(event_type='ITEM_COPY', source_id=folder.folder_id, source_type='folder'))
    self._db_session.commit()
    # NOTE(review): json.dumps on a model instance assumes a custom encoder
    # or serializable model — verify upstream.
    return json.dumps(folder)
3
View Complete Implementation : versioned_rows.py
Copyright MIT License
Author : sqlalchemy
def new_version(self, session):
    """Turn this persistent object into a pending INSERT of a new row."""
    # make us transient (removes persistent
    # identity).
    make_transient(self)

    # set 'id' to None.
    # a new PK will be generated on INSERT.
    self.id = None
3
View Complete Implementation : versioned_rows_w_versionid.py
Copyright MIT License
Author : sqlalchemy
def new_version(self, session):
    """Create a new version of this row by incrementing ``version_id``.

    First marks the currently-stored row as no longer current via a bulk
    UPDATE, then detaches this instance so the flush emits an INSERT with
    the bumped version_id (part of the composite primary key).
    """
    # optional - set previous version to have is_current_version=False
    old_id = self.id
    # Scrape-corrupted "__clast__" restored to "__class__".
    session.query(self.__class__).filter_by(id=old_id).update(
        values=dict(is_current_version=False), synchronize_session=False
    )

    # make us transient (removes persistent identity).
    make_transient(self)

    # increment version_id, which means we have a new PK.
    self.version_id += 1
0
View Complete Implementation : queue_green_oa_scrape.py
Copyright MIT License
Author : ourresearch
def scrape_pages(pages):
    """Scrape every page in *pages* in a worker pool and persist results.

    Each page is detached from the session first, the DB connection is
    released during network IO, and the successfully scraped rows are
    saved with a bulk update.  Returns the ids of the scraped pages.
    """
    for page in pages:
        make_transient(page)

    # free up the connection while doing net IO
    db.session.close()
    db.engine.dispose()

    worker_pool = get_worker_pool()
    results = worker_pool.map(scrape_with_timeout, pages, chunksize=1)
    scraped = [page for page in results if page]
    logger.info(u'finished scraping all pages')
    worker_pool.close()
    worker_pool.join()

    logger.info(u'preparing update records')
    update_rows = []
    for page in scraped:
        row = page.__dict__
        # the SQLAlchemy bookkeeping attribute must not go into the mapping
        row.pop('_sa_instance_state')
        update_rows.append(row)

    logger.info(u'saving update records')
    db.session.bulk_update_mappings(PageNew, update_rows)

    return [page.id for page in scraped]
0
View Complete Implementation : queue_pdf_url_check.py
Copyright MIT License
Author : ourresearch
def check_pdf_urls(pdf_urls):
    """Re-check the status of each PDF url and bulk-save the results.

    Detaches the url objects from the session, releases the DB connection
    while the request pool does network IO, then bulk-updates the checked
    rows and commits, logging the commit duration.
    """
    for pdf_url in pdf_urls:
        make_transient(pdf_url)

    # free up the connection while doing net IO
    safe_commit(db)
    db.engine.dispose()

    pool = get_request_pool()
    checked = pool.map(get_pdf_url_status, pdf_urls, chunksize=1)
    pool.close()
    pool.join()

    mappings = []
    for item in checked:
        state = item.__dict__
        # drop SQLAlchemy bookkeeping before handing dicts to bulk update
        state.pop('_sa_instance_state')
        mappings.append(state)

    db.session.bulk_update_mappings(PdfUrl, mappings)

    start_time = time()
    if not safe_commit(db):
        logger.info(u"COMMIT fail")
    logger.info(u"commit took {} seconds".format(elapsed(start_time, 2)))
0
View Complete Implementation : base.py
Copyright BSD 3-Clause "New" or "Revised" License
Author : remram44
def notify_project(self, project_id, cmd):
    """Deliver *cmd* to every future waiting on *project_id*.

    The command is detached from its SQLAlchemy session first so waiters
    can safely use it after the session is gone.
    """
    # Scrape-corrupted "astert" restored to "assert".
    assert isinstance(project_id, int)
    make_transient(cmd)
    # Resolve and clear all registered waiters for this project in one shot.
    for future in self.event_waiters.pop(project_id, []):
        future.set_result(cmd)
0
View Complete Implementation : versioned_map.py
Copyright MIT License
Author : sqlalchemy
def new_version(self, session):
    """Convert this object into a new INSERT, rebuilding ``elements``.

    Newly-added association objects move to the new version, while
    unchanged values are re-associated through fresh
    ``ConfigValueAssociation`` objects so the old rows stay attached to
    the previous version.
    """
    # convert to an INSERT
    make_transient(self)
    self.id = None

    # history of the 'elements' collection.
    # this is a tuple of groups: (added, unchanged, deleted)
    hist = attributes.get_history(self, "elements")

    # rewrite the 'elements' collection
    # from scratch, removing all history
    attributes.set_committed_value(self, "elements", {})

    # new elements in the "added" group
    # are moved to our new collection.
    for elem in hist.added:
        self.elements[elem.name] = elem

    # copy elements in the 'unchanged' group.
    # the new ones associate with the new ConfigData,
    # the old ones stay associated with the old ConfigData.
    # Scrape-corrupted "ConfigValueastociation" restored to
    # "ConfigValueAssociation".
    for elem in hist.unchanged:
        self.elements[elem.name] = ConfigValueAssociation(
            elem.config_value
        )
0
View Complete Implementation : versioned_update_old_row.py
Copyright MIT License
Author : sqlalchemy
def new_version(self, session):
    """Close out the current row via UPDATE and make *self* a new INSERT.

    A detached copy carrying the old identity key is re-added so its
    'end' timestamp can be closed out with an UPDATE, while this instance
    is made transient so the flush INSERTs it as the new current row.
    """
    # our current identity key, which will be used on the "old"
    # version of us to emit an UPDATE. this is just for assertion purposes
    old_identity_key = inspect(self).key

    # make sure self.start / self.end are not expired
    self.id, self.start, self.end

    # turn us into an INSERT
    make_transient(self)

    # make the "old" version of us, which we will turn into an
    # UPDATE.  Scrape-corrupted "__clast__" restored to "__class__".
    old_copy_of_us = self.__class__(
        id=self.id, start=self.start, end=self.end
    )

    # turn old_copy_of_us into an UPDATE
    make_transient_to_detached(old_copy_of_us)

    # the "old" object has our old identity key (that we no longer have)
    assert inspect(old_copy_of_us).key == old_identity_key

    # now put it back in the session
    session.add(old_copy_of_us)

    # now update the 'end' - SQLAlchemy sees this as a PK switch
    old_copy_of_us.end = current_time()

    # fun fact! the new_version() routine is *not* called for
    # old_copy_of_us! because we are already in the before_flush() hook!
    # this surprised even me. I was thinking we had to guard against
    # it. Still might be a good idea to do so.
    self.start = current_time()
    self.end = current_time() + datetime.timedelta(days=2)