Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
pytest: mark xpay real gossip test as slow
``` ____________________ ERROR at teardown of test_xpay_maxfee _____________________ ... # Format a nice list of everything that went wrong and raise an exception request.node.has_errors = True > raise ValueError(str(errors)) E ValueError: E Node errors: E - lightningd-1: Node exited with return code 1 E Global errors: ``` And: ``` @unittest.skipIf(TEST_NETWORK != 'regtest', 'too dusty on elements') def test_xpay_maxfee(node_factory, bitcoind, chainparams): """Test which shows that we don't exceed maxfee""" outfile = tempfile.NamedTemporaryFile(prefix='gossip-store-') subprocess.check_output(['devtools/gossmap-compress', 'decompress', '--node-map=3301=022d223620a359a47ff7f7ac447c85c46c923da53389221a0054c11c1e3ca31d59', 'tests/data/gossip-store-2024-09-22.compressed', outfile.name]).decode('utf-8').splitlines() AMOUNT = 100_000_000 # l2 will warn l1 about its invalid gossip: ignore. # We throttle l1's gossip to avoid massive log spam. > l1, l2 = node_factory.line_graph(2, # This is in sats, so 1000x amount we send. 
fundamount=AMOUNT, opts=[{'gossip_store_file': outfile.name, 'subdaemon': 'channeld:../tests/plugins/channeld_fakenet', 'allow_warning': True, 'dev-throttle-gossip': None}, {'allow_bad_gossip': True}]) tests/test_xpay.py:509: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ contrib/pyln-testing/pyln/testing/utils.py:1720: in line_graph nodes = self.get_nodes(num_nodes, opts=opts) contrib/pyln-testing/pyln/testing/utils.py:1602: in get_nodes return [j.result() for j in jobs] contrib/pyln-testing/pyln/testing/utils.py:1602: in <listcomp> return [j.result() for j in jobs] /opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/concurrent/futures/_base.py:458: in result return self.__get_result() /opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/concurrent/futures/_base.py:403: in __get_result raise self._exception /opt/hostedtoolcache/Python/3.10.16/x64/lib/python3.10/concurrent/futures/thread.py:58: in run result = self.fn(*self.args, **self.kwargs) contrib/pyln-testing/pyln/testing/utils.py:1653: in get_node node.start(wait_for_bitcoind_sync) contrib/pyln-testing/pyln/testing/utils.py:1015: in start self.daemon.start(stderr_redir=stderr_redir) contrib/pyln-testing/pyln/testing/utils.py:671: in start self.wait_for_log("Server started with public key") contrib/pyln-testing/pyln/testing/utils.py:355: in wait_for_log return self.wait_for_logs([regex], timeout) _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ self = <pyln.testing.utils.LightningD object at 0x7f27ab586c20> regexs = ['Server started with public key'], timeout = 180 def wait_for_logs(self, regexs, timeout=TIMEOUT): """Look for `regexs` in the logs. The logs contain tailed stdout of the process. We look for each regex in `regexs`, starting from `logsearch_start` which normally is the position of the last found entry of a previous wait-for logs call. The ordering inside `regexs` doesn't matter. 
We fail if the timeout is exceeded or if the underlying process exits before all the `regexs` were found. If timeout is None, no time-out is applied. """ logging.debug("Waiting for {} in the logs".format(regexs)) exs = [re.compile(r) for r in regexs] start_time = time.time() while True: if self.logsearch_start >= len(self.logs): if not self.logs_catchup(): time.sleep(0.25) if timeout is not None and time.time() > start_time + timeout: print("Time-out: can't find {} in logs".format(exs)) for r in exs: if self.is_in_log(r): print("({} was previously in logs!)".format(r)) > raise TimeoutError('Unable to find "{}" in logs.'.format(exs)) E TimeoutError: Unable to find "[re.compile('Server started with public key')]" in logs. ``` gossipd (and other plugins) simply take too long to digest the gossmap under valgrind. Signed-off-by: Rusty Russell <[email protected]>
- Loading branch information