Here are examples of the Python API `twisted.internet.endpoints.ProcessEndpoint`, taken from open source projects. By voting up, you can indicate which examples are most useful and appropriate.
2 Examples
Example 1 — View Complete Implementation : pool.py
Copyright GNU General Public License v3.0
Author : Uninett
@inlineCallbacks
def start(self):
    """Spawn a worker subprocess and connect to it over AMP.

    Launches the worker using the project's own command with worker
    flags, connects a ``ProcessAMP`` protocol to it, and installs a
    callback for when the worker process dies.

    Returns:
        This pool instance (via ``returnValue``) once the worker
        connection is established.
    """
    args = [control.get_process_command(), '--worker', '-f', '-s', '-P']
    if self.threadpoolsize:
        args.append('--threadpoolsize=%d' % self.threadpoolsize)
    # Per ProcessEndpoint convention, args[0] repeats the executable path.
    endpoint = ProcessEndpoint(reactor, control.get_process_command(),
                               args, os.environ)
    factory = protocol.Factory()
    # is_worker=False: this side is the master; JobHandler locates the
    # AMP responders for calls coming back from the worker.
    factory.protocol = lambda: ProcessAMP(is_worker=False,
                                          locator=JobHandler())
    self.process = yield endpoint.connect(factory)
    # NOTE(review): assumes ProcessAMP exposes a `lost_handler` attribute
    # that is invoked when the connection is lost — confirm in ProcessAMP.
    self.process.lost_handler = self._worker_died
    returnValue(self)
Example 2 — View Complete Implementation : __init__.py
Copyright MIT License
Author : ArturGaspar
@classmethod
def from_crawler(cls, crawler):
    """Build the middleware instance from the crawler's settings.

    Chooses between connecting to an external browser-engine server
    (``BROWSER_ENGINE_SERVER``) and starting one as a child process
    (``BROWSER_ENGINE_START_SERVER``); exactly one must be configured.
    Optionally enables the remotely accessible cookies middleware.

    Raises:
        NotConfigured: if neither or both server options are set.
    """
    settings = crawler.settings
    if settings.getbool('BROWSER_ENGINE_COOKIES_ENABLED', False):
        if settings.getbool('COOKIES_ENABLED'):
            # Both cookie layers active would double-handle cookies.
            logger.warning("Default cookies middleware enabled together "
                           "with browser engine aware cookies middleware. "
                           "Set COOKIES_ENABLED to False.")
        cookies_mw = RemotelyAccessibleCookiesMiddleware(
            debug=settings.getbool('COOKIES_DEBUG')
        )
    else:
        cookies_mw = None
    server = settings.get('BROWSER_ENGINE_SERVER')
    start_server = settings.getbool('BROWSER_ENGINE_START_SERVER', False)
    if not (server or start_server):
        raise NotConfigured("Must specify either BROWSER_ENGINE_SERVER or "
                            "BROWSER_ENGINE_START_SERVER")
    if server and start_server:
        raise NotConfigured("Must not specify both BROWSER_ENGINE_SERVER "
                            "and BROWSER_ENGINE_START_SERVER=True")
    if server:
        endpoint = clientFromString(reactor, server)
    else:
        # Twisted logs the process's stderr with INFO level.
        logging.getLogger("twisted").setLevel(logging.INFO)
        # Run the browser engine as a subprocess of this interpreter,
        # talking over stdio; argv[0] is the executable itself.
        argv = [sys.executable,
                "-m", "scrapy_qtwebkit.browser_engine", "stdio"]
        endpoint = ProcessEndpoint(reactor, argv[0], argv, env=None)
    mw = cls(
        crawler,
        endpoint,
        page_limit=settings.getint('BROWSER_ENGINE_PAGE_LIMIT', 4),
        browser_options=settings.getdict('BROWSER_ENGINE_OPTIONS'),
        cookies_middleware=cookies_mw,
    )
    # Shut the engine down when the Scrapy engine stops.
    crawler.signals.connect(mw._engine_stopped,
                            signal=signals.engine_stopped)
    return mw