Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

92

93

94

95

96

97

98

99

100

101

102

103

104

105

106

107

108

109

110

111

112

113

114

115

116

117

118

119

120

121

122

123

124

125

126

127

128

129

130

131

132

133

134

135

136

137

138

139

140

141

142

143

144

145

146

147

148

149

150

151

152

153

154

155

156

157

158

159

160

161

162

163

164

165

166

167

168

169

170

171

172

173

174

175

176

177

178

179

180

181

182

183

184

185

186

187

188

189

190

191

192

193

194

195

196

197

198

199

200

201

202

203

204

205

206

207

208

209

210

211

212

213

214

215

216

217

218

219

220

221

222

223

224

225

226

227

228

229

230

231

232

233

234

235

236

237

238

239

240

241

242

243

244

245

246

247

248

249

250

251

252

253

254

255

256

257

258

259

260

261

262

263

264

265

266

267

268

269

270

271

272

273

274

275

276

277

278

279

280

281

282

283

284

285

286

287

288

289

290

291

292

293

294

295

296

297

298

299

300

301

302

303

304

305

306

307

308

309

310

311

312

313

314

315

316

317

318

319

320

321

322

323

324

325

326

327

328

329

330

331

332

333

334

335

336

337

338

339

340

341

342

343

344

345

346

347

348

349

350

351

352

353

354

355

356

357

358

359

360

361

362

363

364

365

366

367

368

369

370

371

372

373

374

375

376

377

378

379

380

381

382

383

384

385

386

387

388

389

390

391

392

393

394

395

396

397

398

399

400

401

402

403

404

405

406

407

408

409

410

411

412

413

414

415

416

417

418

419

420

421

422

423

424

425

426

427

428

429

430

431

432

433

434

435

436

437

438

439

440

441

442

443

444

445

446

447

448

449

450

451

452

453

454

455

456

457

458

459

460

461

462

463

464

465

466

467

468

469

470

471

472

473

474

475

476

477

478

479

480

481

482

483

484

485

486

487

488

489

490

491

492

493

494

495

496

497

498

499

500

501

502

503

504

505

506

507

508

509

510

511

512

513

514

515

516

517

518

519

520

521

522

523

524

525

526

527

528

529

530

531

532

533

534

535

536

537

538

539

540

541

542

543

544

545

546

547

548

549

550

551

552

553

554

555

556

557

558

559

560

561

562

563

564

565

566

567

568

569

570

571

572

573

574

575

576

577

578

579

580

581

582

583

584

585

586

587

588

589

590

591

592

593

594

595

596

597

598

599

600

601

602

603

604

605

606

607

608

609

610

611

612

613

614

615

616

617

618

619

620

621

622

623

624

625

626

627

628

629

630

631

632

633

634

635

636

637

638

639

640

641

642

643

644

645

646

647

648

649

650

651

652

653

654

655

656

657

658

659

660

661

662

663

664

665

666

667

668

669

670

671

672

673

674

675

676

677

678

679

680

681

682

683

684

685

686

687

688

689

690

691

692

693

694

695

696

697

698

699

700

701

702

703

704

705

706

707

708

709

710

711

712

713

714

715

716

717

718

719

720

721

722

723

724

725

726

727

728

729

730

731

732

733

734

735

736

737

738

739

740

741

742

743

744

745

746

747

748

749

750

751

752

753

754

755

756

757

758

759

760

761

762

763

764

765

766

767

768

769

770

771

772

773

774

775

776

777

778

779

780

781

782

783

784

785

786

787

788

789

790

791

792

793

794

795

796

797

798

799

800

801

802

803

804

805

806

807

808

809

810

811

812

813

814

815

816

817

818

819

820

821

822

823

824

825

826

827

828

829

830

831

832

833

834

835

836

837

838

839

840

841

842

843

844

845

846

847

848

849

850

851

852

853

854

855

856

857

858

859

860

861

862

863

864

865

866

867

868

869

870

871

872

873

874

875

876

877

878

879

880

881

882

883

884

885

886

887

888

889

890

891

892

893

894

895

896

897

898

899

900

901

902

903

904

905

906

907

908

909

910

911

912

913

914

915

916

917

918

919

920

921

922

923

924

925

926

927

928

929

930

931

932

933

934

935

936

937

938

939

940

941

942

943

944

945

946

947

948

949

950

951

952

953

954

955

956

957

958

959

960

961

962

963

964

965

966

967

968

969

970

971

972

973

974

975

976

977

978

979

980

981

982

983

984

985

986

987

988

989

990

991

992

993

994

995

996

997

998

999

1000

1001

1002

1003

1004

1005

1006

1007

1008

1009

1010

1011

1012

1013

1014

1015

1016

1017

1018

1019

1020

1021

1022

1023

1024

1025

1026

1027

1028

1029

1030

1031

1032

1033

1034

1035

1036

1037

1038

1039

1040

1041

1042

1043

1044

1045

1046

1047

1048

1049

1050

1051

1052

1053

1054

1055

1056

1057

1058

1059

1060

1061

1062

1063

1064

1065

1066

1067

1068

1069

1070

1071

1072

1073

1074

1075

1076

1077

1078

1079

1080

1081

1082

1083

1084

1085

1086

1087

1088

1089

1090

1091

1092

1093

1094

1095

1096

1097

1098

1099

1100

1101

1102

1103

1104

1105

1106

1107

1108

1109

1110

1111

1112

1113

1114

1115

1116

1117

1118

1119

1120

1121

1122

1123

1124

1125

1126

1127

1128

1129

1130

1131

1132

1133

1134

1135

1136

1137

1138

1139

1140

1141

1142

1143

1144

1145

1146

1147

1148

1149

1150

1151

1152

1153

1154

1155

1156

1157

1158

1159

1160

1161

1162

1163

1164

1165

1166

1167

1168

1169

1170

1171

1172

1173

1174

1175

1176

1177

1178

1179

1180

1181

1182

1183

1184

1185

1186

1187

1188

1189

1190

1191

1192

1193

1194

1195

1196

1197

1198

1199

1200

1201

1202

1203

1204

1205

1206

1207

1208

1209

1210

1211

1212

1213

1214

1215

1216

1217

1218

1219

1220

1221

1222

1223

1224

1225

1226

1227

1228

1229

1230

1231

1232

1233

1234

1235

1236

1237

1238

1239

1240

1241

1242

1243

1244

1245

1246

1247

1248

1249

1250

1251

1252

1253

1254

1255

1256

1257

1258

1259

1260

1261

1262

1263

1264

1265

1266

1267

1268

1269

1270

1271

1272

1273

1274

1275

1276

1277

1278

1279

1280

1281

1282

1283

1284

1285

1286

1287

1288

1289

1290

1291

1292

1293

1294

1295

1296

1297

1298

1299

1300

1301

1302

1303

1304

1305

1306

1307

1308

1309

1310

1311

1312

1313

1314

1315

1316

1317

1318

1319

1320

1321

1322

1323

1324

1325

1326

1327

1328

1329

1330

1331

1332

1333

1334

1335

1336

1337

1338

1339

1340

1341

1342

1343

1344

1345

1346

1347

1348

1349

1350

1351

1352

1353

1354

1355

1356

1357

1358

1359

1360

1361

1362

1363

1364

1365

1366

1367

1368

1369

1370

1371

1372

1373

1374

1375

1376

1377

1378

1379

1380

1381

1382

1383

1384

1385

1386

1387

1388

1389

1390

1391

1392

1393

1394

1395

1396

1397

1398

1399

1400

1401

1402

1403

1404

1405

1406

1407

1408

1409

1410

1411

1412

1413

1414

1415

1416

1417

1418

1419

1420

1421

1422

1423

1424

1425

1426

1427

1428

1429

1430

1431

1432

1433

1434

1435

1436

1437

1438

1439

1440

1441

1442

1443

1444

1445

1446

1447

1448

1449

1450

1451

1452

1453

1454

1455

1456

1457

1458

1459

1460

1461

1462

1463

1464

1465

1466

1467

1468

1469

1470

1471

1472

1473

1474

1475

1476

1477

1478

1479

1480

1481

1482

1483

1484

1485

1486

1487

1488

1489

1490

1491

1492

1493

1494

1495

1496

1497

1498

1499

1500

1501

1502

1503

1504

1505

1506

1507

1508

1509

1510

1511

1512

1513

1514

1515

1516

1517

1518

1519

1520

1521

1522

1523

1524

1525

1526

1527

1528

1529

1530

1531

1532

1533

1534

1535

1536

1537

1538

1539

1540

1541

1542

1543

1544

1545

1546

1547

1548

1549

1550

1551

1552

1553

1554

1555

1556

1557

1558

1559

1560

1561

1562

1563

1564

1565

1566

1567

1568

1569

1570

1571

1572

1573

1574

1575

1576

1577

1578

1579

1580

1581

1582

1583

1584

1585

1586

1587

1588

1589

1590

1591

1592

1593

1594

1595

1596

1597

1598

1599

1600

1601

1602

1603

1604

1605

1606

1607

1608

1609

1610

1611

1612

1613

1614

1615

1616

1617

1618

1619

1620

1621

1622

1623

1624

1625

1626

1627

1628

1629

1630

1631

1632

1633

1634

1635

1636

1637

1638

1639

1640

1641

1642

1643

1644

1645

1646

1647

1648

1649

1650

1651

1652

1653

1654

1655

1656

1657

1658

1659

1660

1661

1662

1663

1664

1665

1666

1667

1668

1669

1670

1671

1672

1673

1674

1675

1676

1677

1678

1679

1680

1681

1682

1683

1684

1685

1686

1687

1688

1689

1690

1691

1692

1693

1694

1695

1696

1697

1698

1699

1700

1701

1702

1703

1704

1705

1706

1707

1708

1709

1710

1711

1712

1713

1714

1715

1716

1717

1718

1719

1720

1721

1722

1723

1724

1725

1726

1727

1728

1729

1730

1731

1732

1733

1734

1735

1736

1737

1738

1739

1740

1741

1742

1743

1744

1745

1746

1747

1748

1749

1750

1751

1752

1753

1754

1755

1756

1757

1758

1759

1760

1761

1762

1763

1764

1765

1766

1767

1768

1769

1770

1771

1772

1773

1774

1775

1776

1777

1778

1779

1780

1781

1782

1783

1784

1785

1786

1787

1788

1789

1790

1791

1792

1793

1794

1795

1796

1797

1798

1799

1800

1801

1802

1803

1804

1805

1806

1807

1808

1809

1810

1811

1812

1813

1814

1815

1816

1817

1818

1819

1820

1821

1822

1823

1824

1825

1826

1827

1828

1829

1830

1831

1832

1833

1834

1835

1836

1837

1838

1839

1840

1841

1842

1843

1844

1845

1846

1847

1848

1849

1850

1851

1852

1853

1854

1855

1856

1857

1858

1859

1860

1861

1862

1863

1864

1865

1866

1867

1868

1869

1870

1871

1872

1873

1874

1875

1876

1877

1878

1879

1880

1881

1882

1883

1884

1885

1886

1887

1888

1889

1890

1891

1892

1893

1894

1895

1896

1897

1898

1899

1900

1901

1902

1903

1904

1905

1906

1907

1908

1909

1910

1911

1912

1913

1914

1915

1916

1917

1918

1919

1920

1921

1922

1923

1924

1925

1926

1927

1928

1929

1930

1931

1932

1933

1934

1935

1936

1937

1938

1939

1940

1941

1942

1943

1944

1945

1946

1947

1948

1949

1950

1951

1952

1953

1954

1955

1956

1957

1958

1959

1960

1961

1962

1963

1964

1965

1966

1967

1968

1969

1970

1971

1972

1973

1974

1975

1976

1977

1978

1979

1980

1981

1982

1983

1984

1985

1986

1987

1988

1989

1990

1991

1992

1993

1994

1995

1996

1997

1998

1999

2000

2001

2002

2003

2004

2005

2006

2007

2008

2009

2010

2011

2012

2013

2014

2015

2016

2017

2018

2019

2020

2021

2022

2023

2024

2025

2026

2027

2028

2029

2030

2031

2032

2033

2034

2035

2036

2037

2038

2039

2040

2041

2042

2043

2044

2045

2046

2047

2048

2049

2050

2051

2052

2053

2054

2055

2056

2057

2058

2059

2060

2061

2062

2063

2064

2065

2066

2067

2068

2069

2070

2071

2072

2073

2074

2075

2076

2077

2078

2079

2080

2081

2082

2083

2084

2085

2086

2087

2088

2089

2090

2091

2092

2093

2094

2095

2096

2097

2098

2099

2100

2101

2102

2103

2104

2105

2106

2107

2108

2109

2110

2111

2112

2113

2114

2115

2116

2117

2118

2119

2120

2121

2122

2123

2124

2125

2126

2127

2128

2129

2130

2131

2132

2133

2134

2135

2136

2137

2138

2139

2140

2141

2142

2143

2144

2145

2146

2147

2148

2149

2150

2151

2152

2153

2154

2155

2156

2157

2158

2159

2160

2161

2162

2163

2164

2165

2166

2167

2168

2169

2170

2171

2172

2173

2174

2175

2176

2177

2178

2179

2180

2181

2182

2183

2184

2185

2186

2187

2188

2189

2190

2191

2192

2193

2194

2195

2196

2197

2198

2199

2200

2201

2202

2203

2204

2205

2206

2207

2208

2209

2210

2211

2212

2213

2214

2215

2216

2217

2218

2219

2220

2221

2222

2223

2224

2225

2226

2227

2228

2229

2230

2231

2232

2233

2234

2235

2236

2237

2238

2239

2240

2241

2242

2243

2244

2245

2246

2247

2248

2249

2250

2251

2252

2253

2254

2255

2256

2257

2258

2259

2260

2261

2262

2263

2264

2265

2266

2267

2268

2269

2270

2271

2272

2273

2274

2275

2276

2277

2278

2279

2280

2281

2282

2283

2284

2285

2286

2287

2288

2289

2290

2291

2292

2293

2294

2295

2296

2297

2298

2299

2300

2301

2302

2303

2304

2305

2306

2307

2308

2309

2310

2311

2312

2313

2314

2315

2316

2317

2318

2319

2320

2321

2322

2323

2324

2325

2326

2327

2328

2329

2330

2331

2332

2333

2334

2335

2336

2337

2338

2339

2340

2341

2342

2343

2344

2345

2346

2347

2348

2349

2350

2351

2352

2353

2354

2355

2356

2357

2358

2359

2360

2361

2362

2363

2364

2365

2366

2367

2368

2369

2370

2371

2372

2373

2374

2375

2376

2377

2378

2379

2380

2381

2382

2383

2384

2385

2386

2387

2388

2389

2390

2391

2392

2393

2394

2395

2396

2397

""" 

Processes for running doctests 

 

This module controls the processes started by Sage that actually run 

the doctests. 

 

EXAMPLES: 

 

The following examples are used in doctesting this file:: 

 

sage: doctest_var = 42; doctest_var^2 

1764 

sage: R.<a> = ZZ[] 

sage: a + doctest_var 

a + 42 

 

AUTHORS: 

 

- David Roe (2012-03-27) -- initial version, based on Robert Bradshaw's code. 

 

- Jeroen Demeyer (2013 and 2015) -- major improvements to forking and logging 

""" 

 

#***************************************************************************** 

# Copyright (C) 2012 David Roe <roed.math@gmail.com> 

# Robert Bradshaw <robertwb@gmail.com> 

# William Stein <wstein@gmail.com> 

# Copyright (C) 2013-2015 Jeroen Demeyer <jdemeyer@cage.ugent.be> 

# 

# Distributed under the terms of the GNU General Public License (GPL) 

# as published by the Free Software Foundation; either version 2 of 

# the License, or (at your option) any later version. 

# http://www.gnu.org/licenses/ 

#***************************************************************************** 

 

from __future__ import print_function 

from __future__ import absolute_import 

import __future__ 

 

import hashlib, multiprocessing, os, sys, time, warnings, signal, linecache 

import errno 

import doctest, traceback 

import tempfile 

import six 

 

import sage.misc.randstate as randstate 

from .util import Timer, RecordingDict, count_noun 

from .sources import DictAsObject, FileDocTestSource 

from .parsing import OriginalSource, reduce_hex 

from sage.structure.sage_object import SageObject 

from .parsing import SageOutputChecker, pre_hash, get_source 

from sage.repl.user_globals import set_globals 

from sage.interfaces.process import ContainChildren 

from sage.cpython.atexit import restore_atexit 

from sage.cpython.string import bytes_to_str, str_to_bytes 

 

 

# All doctests run as if the following future imports are present 

MANDATORY_COMPILE_FLAGS = __future__.print_function.compiler_flag 

 

 

def init_sage(): 

""" 

Import the Sage library. 

 

This function is called once at the beginning of a doctest run 

(rather than once for each file). It imports the Sage library, 

sets DOCTEST_MODE to True, and invalidates any interfaces. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import init_sage 

sage: sage.doctest.DOCTEST_MODE = False 

sage: init_sage() 

sage: sage.doctest.DOCTEST_MODE 

True 

 

Check that pexpect interfaces are invalidated, but still work:: 

 

sage: gap.eval("my_test_var := 42;") 

'42' 

sage: gap.eval("my_test_var;") 

'42' 

sage: init_sage() 

sage: gap('Group((1,2,3)(4,5), (3,4))') 

Group( [ (1,2,3)(4,5), (3,4) ] ) 

sage: gap.eval("my_test_var;") 

Traceback (most recent call last): 

... 

RuntimeError: Gap produced error output... 

 

Check that SymPy equation pretty printer is limited in doctest 

mode to default width (80 chars):: 

 

sage: from sympy import sympify 

sage: from sympy.printing.pretty.pretty import PrettyPrinter 

sage: s = sympify('+x^'.join(str(i) for i in range(30))) 

sage: print(PrettyPrinter(settings={'wrap_line':True}).doprint(s)) 

29 28 27 26 25 24 23 22 21 20 19 18 17 

x + x + x + x + x + x + x + x + x + x + x + x + x + 

<BLANKLINE> 

16 15 14 13 12 11 10 9 8 7 6 5 4 3 

x + x + x + x + x + x + x + x + x + x + x + x + x + x + x 

<BLANKLINE> 

2 

+ x 

 

The displayhook sorts dictionary keys to simplify doctesting of 

dictionary output:: 

 

sage: {'a':23, 'b':34, 'au':56, 'bbf':234, 'aaa':234} 

{'a': 23, 'aaa': 234, 'au': 56, 'b': 34, 'bbf': 234} 

""" 

# We need to ensure that the Matplotlib font cache is built to 

# avoid spurious warnings (see Trac #20222). 

import matplotlib.font_manager 

 

# Make sure that the agg backend is selected during doctesting. 

# This needs to be done before any other matplotlib calls. 

matplotlib.use('agg') 

 

# Do this once before forking off child processes running the tests. 

# This is more efficient because we only need to wait once for the 

# Sage imports. 

import sage.doctest 

sage.doctest.DOCTEST_MODE=True 

import sage.all_cmdline 

sage.interfaces.quit.invalidate_all() 

 

# Disable cysignals debug messages in doctests: this is needed to 

# make doctests pass when cysignals was built with debugging enabled 

from cysignals.signals import set_debug_level 

set_debug_level(0) 

 

# Use the rich output backend for doctest 

from sage.repl.rich_output import get_display_manager 

dm = get_display_manager() 

from sage.repl.rich_output.backend_doctest import BackendDoctest 

dm.switch_backend(BackendDoctest()) 

 

# Switch on extra debugging 

from sage.structure.debug_options import debug 

debug.refine_category_hash_check = True 

 

# We import readline before forking, otherwise Pdb doesn't work 

# os OS X: http://trac.sagemath.org/14289 

import readline 

 

# Disable SymPy terminal width detection 

from sympy.printing.pretty.stringpict import stringPict 

stringPict.terminal_width = lambda self:0 

 

 

def showwarning_with_traceback(message, category, filename, lineno, file=None, line=None): 

r""" 

Displays a warning message with a traceback. 

 

INPUT: see :func:`warnings.showwarning`. 

 

OUTPUT: None 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import showwarning_with_traceback 

sage: showwarning_with_traceback("bad stuff", UserWarning, "myfile.py", 0) 

doctest:warning 

... 

File "<doctest sage.doctest.forker.showwarning_with_traceback[1]>", line 1, in <module> 

showwarning_with_traceback("bad stuff", UserWarning, "myfile.py", Integer(0)) 

: 

UserWarning: bad stuff 

""" 

# Flush stdout to get predictable ordering of output and warnings 

sys.stdout.flush() 

 

# Get traceback to display in warning 

tb = traceback.extract_stack() 

tb = tb[:-1] # Drop this stack frame for showwarning_with_traceback() 

 

# Format warning 

lines = ["doctest:warning\n"] # Match historical warning messages in doctests 

lines.extend(traceback.format_list(tb)) 

lines.append(":\n") # Match historical warning messages in doctests 

lines.extend(traceback.format_exception_only(category, category(message))) 

 

if file is None: 

file = sys.stderr 

try: 

file.writelines(lines) 

file.flush() 

except IOError: 

pass # the file is invalid 

 

 

class SageSpoofInOut(SageObject): 

r""" 

We replace the standard :class:`doctest._SpoofOut` for three reasons: 

 

- we need to divert the output of C programs that don't print 

through sys.stdout, 

- we want the ability to recover partial output from doctest 

processes that segfault. 

- we also redirect stdin (usually from /dev/null) during doctests. 

 

This class defines streams ``self.real_stdin``, ``self.real_stdout`` 

and ``self.real_stderr`` which refer to the original streams. 

 

INPUT: 

 

- ``outfile`` -- (default: ``tempfile.TemporaryFile()``) a seekable open file 

object to which stdout and stderr should be redirected. 

 

- ``infile`` -- (default: ``open(os.devnull)``) an open file object 

from which stdin should be redirected. 

 

EXAMPLES:: 

 

sage: import subprocess, tempfile 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: O = tempfile.TemporaryFile() 

sage: S = SageSpoofInOut(O) 

sage: try: 

....: S.start_spoofing() 

....: print("hello world") 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: S.getvalue() 

'hello world\n' 

sage: _ = O.seek(0) 

sage: S = SageSpoofInOut(outfile=sys.stdout, infile=O) 

sage: try: 

....: S.start_spoofing() 

....: _ = subprocess.check_call("cat") 

....: finally: 

....: S.stop_spoofing() 

....: 

hello world 

sage: O.close() 

""" 

def __init__(self, outfile=None, infile=None): 

""" 

Initialization. 

 

TESTS:: 

 

sage: from tempfile import TemporaryFile 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: with TemporaryFile() as outfile: 

....: with TemporaryFile() as infile: 

....: SageSpoofInOut(outfile, infile) 

<sage.doctest.forker.SageSpoofInOut object at ...> 

""" 

if infile is None: 

self.infile = open(os.devnull) 

self._close_infile = True 

else: 

self.infile = infile 

self._close_infile = False 

if outfile is None: 

self.outfile = tempfile.TemporaryFile() 

self._close_outfile = True 

else: 

self.outfile = outfile 

self._close_outfile = False 

self.spoofing = False 

self.real_stdin = os.fdopen(os.dup(sys.stdin.fileno()), "r") 

self.real_stdout = os.fdopen(os.dup(sys.stdout.fileno()), "w") 

self.real_stderr = os.fdopen(os.dup(sys.stderr.fileno()), "w") 

self.position = 0 

 

def __del__(self): 

""" 

Stop spoofing. 

 

TESTS:: 

 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: spoof = SageSpoofInOut() 

sage: spoof.start_spoofing() 

sage: print("Spoofed!") # No output 

sage: del spoof 

sage: print("Not spoofed!") 

Not spoofed! 

""" 

self.stop_spoofing() 

if self._close_infile: 

self.infile.close() 

if self._close_outfile: 

self.outfile.close() 

for stream in ('stdin', 'stdout', 'stderr'): 

getattr(self, 'real_' + stream).close() 

 

def start_spoofing(self): 

r""" 

Set stdin to read from ``self.infile`` and stdout to print to 

``self.outfile``. 

 

EXAMPLES:: 

 

sage: import os, tempfile 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: O = tempfile.TemporaryFile() 

sage: S = SageSpoofInOut(O) 

sage: try: 

....: S.start_spoofing() 

....: print("this is not printed") 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: S.getvalue() 

'this is not printed\n' 

sage: _ = O.seek(0) 

sage: S = SageSpoofInOut(infile=O) 

sage: try: 

....: S.start_spoofing() 

....: v = sys.stdin.read() 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: v 

'this is not printed\n' 

 

We also catch non-Python output:: 

 

sage: try: 

....: S.start_spoofing() 

....: retval = os.system('''echo "Hello there"\nif [ $? -eq 0 ]; then\necho "good"\nfi''') 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: S.getvalue() 

'Hello there\ngood\n' 

sage: O.close() 

""" 

if not self.spoofing: 

sys.stdout.flush() 

sys.stderr.flush() 

self.outfile.flush() 

os.dup2(self.infile.fileno(), sys.stdin.fileno()) 

os.dup2(self.outfile.fileno(), sys.stdout.fileno()) 

os.dup2(self.outfile.fileno(), sys.stderr.fileno()) 

self.spoofing = True 

 

def stop_spoofing(self): 

""" 

Reset stdin and stdout to their original values. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: S = SageSpoofInOut() 

sage: try: 

....: S.start_spoofing() 

....: print("this is not printed") 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: print("this is now printed") 

this is now printed 

""" 

if self.spoofing: 

sys.stdout.flush() 

sys.stderr.flush() 

self.real_stdout.flush() 

self.real_stderr.flush() 

os.dup2(self.real_stdin.fileno(), sys.stdin.fileno()) 

os.dup2(self.real_stdout.fileno(), sys.stdout.fileno()) 

os.dup2(self.real_stderr.fileno(), sys.stderr.fileno()) 

self.spoofing = False 

 

def getvalue(self): 

r""" 

Gets the value that has been printed to ``outfile`` since the 

last time this function was called. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import SageSpoofInOut 

sage: S = SageSpoofInOut() 

sage: try: 

....: S.start_spoofing() 

....: print("step 1") 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: S.getvalue() 

'step 1\n' 

sage: try: 

....: S.start_spoofing() 

....: print("step 2") 

....: finally: 

....: S.stop_spoofing() 

....: 

sage: S.getvalue() 

'step 2\n' 

""" 

sys.stdout.flush() 

self.outfile.seek(self.position) 

result = self.outfile.read() 

self.position = self.outfile.tell() 

if not result.endswith(b"\n"): 

result += b"\n" 

return bytes_to_str(result) 

 

 

class SageDocTestRunner(doctest.DocTestRunner, object): 

def __init__(self, *args, **kwds): 

""" 

A customized version of DocTestRunner that tracks dependencies 

of doctests. 

 

INPUT: 

 

- ``stdout`` -- an open file to restore for debugging 

 

- ``checker`` -- None, or an instance of 

:class:`doctest.OutputChecker` 

 

- ``verbose`` -- boolean, determines whether verbose printing 

is enabled. 

 

- ``optionflags`` -- Controls the comparison with the expected 

output. See :mod:`testmod` for more information. 

 

- ``coverage`` -- an instance of :class:`coverage.Coverage` for 

tracing test coverage, or ``None`` for no coverage. 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: DTR 

<sage.doctest.forker.SageDocTestRunner object at ...> 

""" 

O = kwds.pop('outtmpfile', None) 

self.msgfile = kwds.pop('msgfile', None) 

self.options = kwds.pop('sage_options') 

self.coverage = kwds.pop('coverage', None) 

doctest.DocTestRunner.__init__(self, *args, **kwds) 

self._fakeout = SageSpoofInOut(O) 

if self.msgfile is None: 

self.msgfile = self._fakeout.real_stdout 

self.history = [] 

self.references = [] 

self.setters = {} 

self.running_global_digest = hashlib.md5() 

 

def _run(self, test, compileflags, out): 

""" 

This function replaces :meth:`doctest.DocTestRunner.__run`. 

 

It changes the following behavior: 

 

- We call :meth:`SageDocTestRunner.execute` rather than just 

exec 

 

- We don't truncate _fakeout after each example since we want 

the output file to be readable by the calling 

:class:`SageWorker`. 

 

Since it needs to be able to read stdout, it should be called 

while spoofing using :class:`SageSpoofInOut`. 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: DTR.run(doctests[0], clear_globs=False) # indirect doctest 

TestResults(failed=0, attempted=4) 

""" 

# Ensure that injecting globals works as expected in doctests 

set_globals(test.globs) 

 

# Keep track of the number of failures and tries. 

failures = tries = 0 

quiet = False 

 

# Save the option flags (since option directives can be used 

# to modify them). 

original_optionflags = self.optionflags 

 

SUCCESS, FAILURE, BOOM = range(3) # `outcome` state 

 

check = self._checker.check_output 

 

# Process each example. 

for examplenum, example in enumerate(test.examples): 

if failures: 

# If exitfirst is set, abort immediately after a 

# failure. 

if self.options.exitfirst: 

break 

 

# If REPORT_ONLY_FIRST_FAILURE is set, then suppress 

# reporting after the first failure (but continue 

# running the tests). 

quiet |= (self.optionflags & doctest.REPORT_ONLY_FIRST_FAILURE) 

 

# Merge in the example's options. 

self.optionflags = original_optionflags 

if example.options: 

for (optionflag, val) in example.options.items(): 

if val: 

self.optionflags |= optionflag 

else: 

self.optionflags &= ~optionflag 

 

# If 'SKIP' is set, then skip this example. 

if self.optionflags & doctest.SKIP: 

continue 

 

# Record that we started this example. 

tries += 1 

# We print the example we're running for easier debugging 

# if this file times out or crashes. 

with OriginalSource(example): 

print("sage: " + example.source[:-1] + " ## line %s ##"%(test.lineno + example.lineno + 1)) 

# Update the position so that result comparison works 

throwaway = self._fakeout.getvalue() 

if not quiet: 

self.report_start(out, test, example) 

 

# Flush files before running the example, so we know for 

# sure that everything is reported properly if the test 

# crashes. 

sys.stdout.flush() 

sys.stderr.flush() 

self.msgfile.flush() 

 

# Use a special filename for compile(), so we can retrieve 

# the source code during interactive debugging (see 

# __patched_linecache_getlines). 

filename = '<doctest %s[%d]>' % (test.name, examplenum) 

 

# Run the example in the given context (globs), and record 

# any exception that gets raised. But for SystemExit, we 

# simply propagate the exception. 

exception = None 

try: 

compiler = lambda example:compile( 

example.source, filename, "single", compileflags, 1) 

# Don't blink! This is where the user's code gets run. 

self.compile_and_execute(example, compiler, test.globs) 

except SystemExit: 

raise 

except BaseException: 

exception = sys.exc_info() 

finally: 

if self.debugger is not None: 

self.debugger.set_continue() # ==== Example Finished ==== 

got = self._fakeout.getvalue() 

 

if not isinstance(got, six.text_type): 

# On Python 3 got should already be unicode text, but on Python 

# 2 it is not. For comparison's sake we want the unicode text 

# decoded from UTF-8. If there was some error such that the 

# output is so malformed that it does not even decode from 

# UTF-8 at all there will be an error in the test framework 

# here. But this shouldn't happen at all, so we want it to be 

# understood as an error in the test framework, and not some 

# subtle error in the code under test. 

got = got.decode('utf-8') 

 

outcome = FAILURE # guilty until proved innocent or insane 

 

# If the example executed without raising any exceptions, 

# verify its output. 

if exception is None: 

if check(example.want, got, self.optionflags): 

outcome = SUCCESS 

 

# The example raised an exception: check if it was expected. 

else: 

exc_info = exception 

exc_msg = traceback.format_exception_only(*exc_info[:2])[-1] 

 

if six.PY3 and example.exc_msg is not None: 

# On Python 3 the exception repr often includes the 

# exception's full module name (for non-builtin 

# exceptions), whereas on Python 2 does not, so we 

# normalize Python 3 exceptions to match tests written to 

# Python 2 

# See https://trac.sagemath.org/ticket/24271 

exc_cls = exc_info[0] 

exc_name = exc_cls.__name__ 

if exc_cls.__module__: 

exc_fullname = (exc_cls.__module__ + '.' + 

exc_cls.__qualname__) 

else: 

exc_fullname = exc_cls.__qualname__ 

 

# See 

# https://docs.python.org/3/library/exceptions.html#OSError 

oserror_aliases = ['IOError', 'EnvironmentError', 

'socket.error', 'select.error', 

'mmap.error'] 

 

if (example.exc_msg.startswith(exc_name) and 

exc_msg.startswith(exc_fullname)): 

exc_msg = exc_msg.replace(exc_fullname, exc_name, 1) 

else: 

# Special case: On Python 3 these exceptions are all 

# just aliases for OSError 

for alias in oserror_aliases: 

if example.exc_msg.startswith(alias + ':'): 

exc_msg = exc_msg.replace('OSError', alias, 1) 

break 

 

if not quiet: 

got += doctest._exception_traceback(exc_info) 

 

# If `example.exc_msg` is None, then we weren't expecting 

# an exception. 

if example.exc_msg is None: 

outcome = BOOM 

 

# We expected an exception: see whether it matches. 

elif check(example.exc_msg, exc_msg, self.optionflags): 

outcome = SUCCESS 

 

# Another chance if they didn't care about the detail. 

elif self.optionflags & doctest.IGNORE_EXCEPTION_DETAIL: 

m1 = re.match(r'(?:[^:]*\.)?([^:]*:)', example.exc_msg) 

m2 = re.match(r'(?:[^:]*\.)?([^:]*:)', exc_msg) 

if m1 and m2 and check(m1.group(1), m2.group(1), 

self.optionflags): 

outcome = SUCCESS 

 

# Report the outcome. 

if outcome is SUCCESS: 

if self.options.warn_long and example.walltime > self.options.warn_long: 

self.report_overtime(out, test, example, got) 

elif not quiet: 

self.report_success(out, test, example, got) 

elif outcome is FAILURE: 

if not quiet: 

self.report_failure(out, test, example, got, test.globs) 

failures += 1 

elif outcome is BOOM: 

if not quiet: 

self.report_unexpected_exception(out, test, example, 

exc_info) 

failures += 1 

else: 

assert False, ("unknown outcome", outcome) 

 

# Restore the option flags (in case they were modified) 

self.optionflags = original_optionflags 

 

# Record and return the number of failures and tries. 

self._DocTestRunner__record_outcome(test, failures, tries) 

 

return doctest.TestResults(failures, tries) 

 

def run(self, test, compileflags=None, out=None, clear_globs=True): 

""" 

Runs the examples in a given doctest. 

 

This function replaces :class:`doctest.DocTestRunner.run` 

since it needs to handle spoofing. It also leaves the display 

hook in place. 

 

INPUT: 

 

- ``test`` -- an instance of :class:`doctest.DocTest` 

 

- ``compileflags`` -- the set of compiler flags used to 

execute examples (passed in to the :func:`compile`). If 

None, they are filled in from the result of 

:func:`doctest._extract_future_flags` applied to 

``test.globs``. 

 

- ``out`` -- a function for writing the output (defaults to 

:func:`sys.stdout.write`). 

 

- ``clear_globs`` -- boolean (default True): whether to clear 

the namespace after running this doctest. 

 

OUTPUT: 

 

- ``f`` -- integer, the number of examples that failed 

 

- ``t`` -- the number of examples tried 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: DTR.run(doctests[0], clear_globs=False) 

TestResults(failed=0, attempted=4) 

""" 

self.setters = {} 

randstate.set_random_seed(0) 

warnings.showwarning = showwarning_with_traceback 

self.running_doctest_digest = hashlib.md5() 

self.test = test 

if compileflags is None: 

compileflags = doctest._extract_future_flags(test.globs) 

compileflags |= MANDATORY_COMPILE_FLAGS 

# We use this slightly modified version of Pdb because it 

# interacts better with the doctesting framework (like allowing 

# doctests for sys.settrace()). Since we already have output 

# spoofing in place, there is no need for redirection. 

if self.options.debug: 

self.debugger = doctest._OutputRedirectingPdb(sys.stdout) 

self.debugger.reset() 

else: 

self.debugger = None 

self.save_linecache_getlines = linecache.getlines 

linecache.getlines = self._DocTestRunner__patched_linecache_getlines 

if out is None: 

def out(s): 

self.msgfile.write(s) 

self.msgfile.flush() 

 

self._fakeout.start_spoofing() 

# If self.options.initial is set, we show only the first failure in each doctest block. 

self.no_failure_yet = True 

try: 

return self._run(test, compileflags, out) 

finally: 

self._fakeout.stop_spoofing() 

linecache.getlines = self.save_linecache_getlines 

if clear_globs: 

test.globs.clear() 

 

def summarize(self, verbose=None): 

""" 

Print results of testing to ``self.msgfile`` and return number 

of failures and tests run. 

 

INPUT: 

 

- ``verbose`` -- whether to print lots of stuff 

 

OUTPUT: 

 

- returns ``(f, t)``, a :class:`doctest.TestResults` instance 

giving the number of failures and the total number of tests 

run. 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: DTR._name2ft['sage.doctest.forker'] = (1,120) 

sage: results = DTR.summarize() 

********************************************************************** 

1 item had failures: 

1 of 120 in sage.doctest.forker 

sage: results 

TestResults(failed=1, attempted=120) 

""" 

if verbose is None: 

verbose = self._verbose 

m = self.msgfile 

notests = [] 

passed = [] 

failed = [] 

totalt = totalf = 0 

for x in self._name2ft.items(): 

name, (f, t) = x 

assert f <= t 

totalt += t 

totalf += f 

if not t: 

notests.append(name) 

elif not f: 

passed.append( (name, t) ) 

else: 

failed.append(x) 

if verbose: 

if notests: 

print(count_noun(len(notests), "item"), "had no tests:", file=m) 

notests.sort() 

for thing in notests: 

print(" %s"%thing, file=m) 

if passed: 

print(count_noun(len(passed), "item"), "passed all tests:", file=m) 

passed.sort() 

for thing, count in passed: 

print(" %s in %s"%(count_noun(count, "test", pad_number=3, pad_noun=True), thing), file=m) 

if failed: 

print(self.DIVIDER, file=m) 

print(count_noun(len(failed), "item"), "had failures:", file=m) 

failed.sort() 

for thing, (f, t) in failed: 

print(" %3d of %3d in %s"%(f, t, thing), file=m) 

if verbose: 

print(count_noun(totalt, "test") + " in " + count_noun(len(self._name2ft), "item") + ".", file=m) 

print("%s passed and %s failed."%(totalt - totalf, totalf), file=m) 

if totalf: 

print("***Test Failed***", file=m) 

else: 

print("Test passed.", file=m) 

m.flush() 

return doctest.TestResults(totalf, totalt) 

 

def update_digests(self, example): 

""" 

Update global and doctest digests. 

 

Sage's doctest runner tracks the state of doctests so that 

their dependencies are known. For example, in the following 

two lines :: 

 

sage: R.<x> = ZZ[] 

sage: f = x^2 + 1 

 

it records that the second line depends on the first since the 

first INSERTS ``x`` into the global namespace and the second 

line RETRIEVES ``x`` from the global namespace. 

 

This function updates the hashes that record these 

dependencies. 

 

INPUT: 

 

- ``example`` -- a :class:`doctest.Example` instance 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os, hashlib 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: DTR.running_global_digest.hexdigest() 

'd41d8cd98f00b204e9800998ecf8427e' 

sage: DTR.running_doctest_digest = hashlib.md5() 

sage: ex = doctests[0].examples[0]; ex.predecessors = None 

sage: DTR.update_digests(ex) 

sage: DTR.running_global_digest.hexdigest() 

'3cb44104292c3a3ab4da3112ce5dc35c' 

""" 

s = str_to_bytes(pre_hash(get_source(example)), 'utf-8') 

self.running_global_digest.update(s) 

self.running_doctest_digest.update(s) 

if example.predecessors is not None: 

digest = hashlib.md5(s) 

gen = (e.running_state for e in example.predecessors) 

digest.update(str_to_bytes(reduce_hex(gen), 'ascii')) 

example.running_state = digest.hexdigest() 

 

def compile_and_execute(self, example, compiler, globs): 

""" 

Runs the given example, recording dependencies. 

 

Rather than using a basic dictionary, Sage's doctest runner 

uses a :class:`sage.doctest.util.RecordingDict`, which records 

every time a value is set or retrieved. Executing the given 

code with this recording dictionary as the namespace allows 

Sage to track dependencies between doctest lines. For 

example, in the following two lines :: 

 

sage: R.<x> = ZZ[] 

sage: f = x^2 + 1 

 

the recording dictionary records that the second line depends 

on the first since the first INSERTS ``x`` into the global 

namespace and the second line RETRIEVES ``x`` from the global 

namespace. 

 

INPUT: 

 

- ``example`` -- a :class:`doctest.Example` instance. 

 

- ``compiler`` -- a callable that, applied to example, 

produces a code object 

 

- ``globs`` -- a dictionary in which to execute the code. 

 

OUTPUT: 

 

- the output of the compiled code snippet. 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.util import RecordingDict 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os, hashlib 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: DTR.running_doctest_digest = hashlib.md5() 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: globs = RecordingDict(globals()) 

sage: 'doctest_var' in globs 

False 

sage: doctests, extras = FDS.create_doctests(globs) 

sage: ex0 = doctests[0].examples[0] 

sage: compiler = lambda ex: compile(ex.source, '<doctest sage.doctest.forker[0]>', 'single', 32768, 1) 

sage: DTR.compile_and_execute(ex0, compiler, globs) 

1764 

sage: globs['doctest_var'] 

42 

sage: globs.set 

{'doctest_var'} 

sage: globs.got 

{'Integer'} 

 

Now we can execute some more doctests to see the dependencies. :: 

 

sage: ex1 = doctests[0].examples[1] 

sage: compiler = lambda ex:compile(ex.source, '<doctest sage.doctest.forker[1]>', 'single', 32768, 1) 

sage: DTR.compile_and_execute(ex1, compiler, globs) 

sage: sorted(list(globs.set)) 

['R', 'a'] 

sage: globs.got 

{'ZZ'} 

sage: ex1.predecessors 

[] 

 

:: 

 

sage: ex2 = doctests[0].examples[2] 

sage: compiler = lambda ex:compile(ex.source, '<doctest sage.doctest.forker[2]>', 'single', 32768, 1) 

sage: DTR.compile_and_execute(ex2, compiler, globs) 

a + 42 

sage: list(globs.set) 

[] 

sage: sorted(list(globs.got)) 

['a', 'doctest_var'] 

sage: set(ex2.predecessors) == set([ex0,ex1]) 

True 

""" 

if isinstance(globs, RecordingDict): 

globs.start() 

example.sequence_number = len(self.history) 

self.history.append(example) 

timer = Timer().start() 

try: 

compiled = compiler(example) 

timer.start() # reset timer 

 

if self.coverage is not None: 

self.coverage.start() 

 

exec(compiled, globs) 

finally: 

if self.coverage is not None: 

self.coverage.stop() 

 

timer.stop().annotate(example) 

 

if isinstance(globs, RecordingDict): 

example.predecessors = [] 

for name in globs.got: 

ref = self.setters.get(name) 

if ref is not None: 

example.predecessors.append(ref) 

for name in globs.set: 

self.setters[name] = example 

else: 

example.predecessors = None 

self.update_digests(example) 

example.total_state = self.running_global_digest.hexdigest() 

example.doctest_state = self.running_doctest_digest.hexdigest() 

 

def _failure_header(self, test, example, message='Failed example:'): 

""" 

We strip out ``sage:`` prompts, so we override 

:meth:`doctest.DocTestRunner._failure_header` for better 

reporting. 

 

INPUT: 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test``. 

 

OUTPUT: 

 

- a string used for reporting that the given example failed. 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: ex = doctests[0].examples[0] 

sage: print(DTR._failure_header(doctests[0], ex)) 

********************************************************************** 

File ".../sage/doctest/forker.py", line 11, in sage.doctest.forker 

Failed example: 

doctest_var = 42; doctest_var^2 

<BLANKLINE> 

 

Without the source swapping:: 

 

sage: import doctest 

sage: print(doctest.DocTestRunner._failure_header(DTR, doctests[0], ex)) 

********************************************************************** 

File ".../sage/doctest/forker.py", line 11, in sage.doctest.forker 

Failed example: 

doctest_var = Integer(42); doctest_var**Integer(2) 

<BLANKLINE> 

 

The ``'Failed example:'`` message can be customized:: 

 

sage: print(DTR._failure_header(doctests[0], ex, message='Hello there!')) 

********************************************************************** 

File ".../sage/doctest/forker.py", line 11, in sage.doctest.forker 

Hello there! 

doctest_var = 42; doctest_var^2 

<BLANKLINE> 

""" 

out = [self.DIVIDER] 

with OriginalSource(example): 

if test.filename: 

if test.lineno is not None and example.lineno is not None: 

lineno = test.lineno + example.lineno + 1 

else: 

lineno = '?' 

out.append('File "%s", line %s, in %s' % 

(test.filename, lineno, test.name)) 

else: 

out.append('Line %s, in %s' % (example.lineno+1, test.name)) 

out.append(message) 

source = example.source 

out.append(doctest._indent(source)) 

return '\n'.join(out) 

 

def report_start(self, out, test, example): 

""" 

Called when an example starts. 

 

INPUT: 

 

- ``out`` -- a function for printing 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test`` 

 

OUTPUT: 

 

- prints a report to ``out`` 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=True, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: ex = doctests[0].examples[0] 

sage: DTR.report_start(sys.stdout.write, doctests[0], ex) 

Trying (line 11): doctest_var = 42; doctest_var^2 

Expecting: 

1764 

""" 

# We completely replace doctest.DocTestRunner.report_start so that we can include line numbers 

with OriginalSource(example): 

if self._verbose: 

start_txt = ('Trying (line %s):'%(test.lineno + example.lineno + 1) 

+ doctest._indent(example.source)) 

if example.want: 

start_txt += 'Expecting:\n' + doctest._indent(example.want) 

else: 

start_txt += 'Expecting nothing\n' 

out(start_txt) 

 

def report_success(self, out, test, example, got): 

""" 

Called when an example succeeds. 

 

INPUT: 

 

- ``out`` -- a function for printing 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test`` 

 

- ``got`` -- a string, the result of running ``example`` 

 

OUTPUT: 

 

- prints a report to ``out`` 

 

- if in debugging mode, starts an IPython prompt at the point 

of the failure 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.misc.misc import walltime 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=True, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: ex = doctests[0].examples[0] 

sage: ex.walltime = 0.0 

sage: DTR.report_success(sys.stdout.write, doctests[0], ex, '1764') 

ok [0.00 s] 

""" 

# We completely replace doctest.DocTestRunner.report_success so that we can include time taken for the test 

if self._verbose: 

out("ok [%.2f s]\n"%example.walltime) 

 

def report_failure(self, out, test, example, got, globs): 

r""" 

Called when a doctest fails. 

 

INPUT: 

 

- ``out`` -- a function for printing 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test`` 

 

- ``got`` -- a string, the result of running ``example`` 

 

- ``globs`` -- a dictionary of globals, used if in debugging mode 

 

OUTPUT: 

 

- prints a report to ``out`` 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=True, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: ex = doctests[0].examples[0] 

sage: DTR.no_failure_yet = True 

sage: DTR.report_failure(sys.stdout.write, doctests[0], ex, 'BAD ANSWER\n', {}) 

********************************************************************** 

File ".../sage/doctest/forker.py", line 11, in sage.doctest.forker 

Failed example: 

doctest_var = 42; doctest_var^2 

Expected: 

1764 

Got: 

BAD ANSWER 

 

If debugging is turned on this function starts an IPython 

prompt when a test returns an incorrect answer:: 

 

sage: import os 

sage: os.environ['SAGE_PEXPECT_LOG'] = "1" 

sage: sage0.quit() 

sage: _ = sage0.eval("import doctest, sys, os, multiprocessing, subprocess") 

sage: _ = sage0.eval("from sage.doctest.parsing import SageOutputChecker") 

sage: _ = sage0.eval("import sage.doctest.forker as sdf") 

sage: _ = sage0.eval("from sage.doctest.control import DocTestDefaults") 

sage: _ = sage0.eval("DD = DocTestDefaults(debug=True)") 

sage: _ = sage0.eval("ex1 = doctest.Example('a = 17', '')") 

sage: _ = sage0.eval("ex2 = doctest.Example('2*a', '1')") 

sage: _ = sage0.eval("DT = doctest.DocTest([ex1,ex2], globals(), 'doubling', None, 0, None)") 

sage: _ = sage0.eval("DTR = sdf.SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)") 

sage: print(sage0.eval("sdf.init_sage(); DTR.run(DT, clear_globs=False)")) # indirect doctest 

********************************************************************** 

Line 1, in doubling 

Failed example: 

2*a 

Expected: 

1 

Got: 

34 

********************************************************************** 

Previously executed commands: 

sage: sage0._expect.expect('sage: ') # sage0 just mis-identified the output as prompt, synchronize 

0 

sage: sage0.eval("a") 

'...17' 

sage: sage0.eval("quit") 

'Returning to doctests...TestResults(failed=1, attempted=2)' 

""" 

if not self.options.initial or self.no_failure_yet: 

self.no_failure_yet = False 

returnval = doctest.DocTestRunner.report_failure(self, out, test, example, got) 

if self.options.debug: 

self._fakeout.stop_spoofing() 

restore_tcpgrp = None 

try: 

if os.isatty(0): 

# In order to read from the terminal, we need 

# to make the current process group the 

# foreground group. 

restore_tcpgrp = os.tcgetpgrp(0) 

signal.signal(signal.SIGTTIN, signal.SIG_IGN) 

signal.signal(signal.SIGTTOU, signal.SIG_IGN) 

os.tcsetpgrp(0, os.getpgrp()) 

print("*" * 70) 

print("Previously executed commands:") 

for ex in test.examples: 

if ex is example: 

break 

if hasattr(ex, 'sage_source'): 

src = ' sage: ' + ex.sage_source 

else: 

src = ' sage: ' + ex.source 

if src[-1] == '\n': 

src = src[:-1] 

src = src.replace('\n', '\n ....: ') 

print(src) 

if ex.want: 

print(doctest._indent(ex.want[:-1])) 

from sage.repl.configuration import sage_ipython_config 

from sage.repl.prompts import DebugPrompts 

from IPython.terminal.embed import InteractiveShellEmbed 

cfg = sage_ipython_config.default() 

# Currently this doesn't work: prompts only work in pty 

# We keep simple_prompt=True, prompts will be "In [0]:" 

# cfg.InteractiveShell.prompts_class = DebugPrompts 

# cfg.InteractiveShell.simple_prompt = False 

shell = InteractiveShellEmbed(config=cfg, banner1='', user_ns=dict(globs)) 

shell(header='', stack_depth=2) 

except KeyboardInterrupt: 

# Assume this is a *real* interrupt. We need to 

# escalate this to the master docbuilding process. 

if not self.options.serial: 

os.kill(os.getppid(), signal.SIGINT) 

raise 

finally: 

# Restore the foreground process group. 

if restore_tcpgrp is not None: 

os.tcsetpgrp(0, restore_tcpgrp) 

signal.signal(signal.SIGTTIN, signal.SIG_DFL) 

signal.signal(signal.SIGTTOU, signal.SIG_DFL) 

print("Returning to doctests...") 

self._fakeout.start_spoofing() 

return returnval 

 

def report_overtime(self, out, test, example, got): 

r""" 

Called when the ``warn_long`` option flag is set and a doctest 

runs longer than the specified time. 

 

INPUT: 

 

- ``out`` -- a function for printing 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test`` 

 

- ``got`` -- a string, the result of running ``example`` 

 

OUTPUT: 

 

- prints a report to ``out`` 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.misc.misc import walltime 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=True, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: ex = doctests[0].examples[0] 

sage: ex.walltime = 1.23 

sage: DTR.report_overtime(sys.stdout.write, doctests[0], ex, 'BAD ANSWER\n') 

********************************************************************** 

File ".../sage/doctest/forker.py", line 11, in sage.doctest.forker 

Warning, slow doctest: 

doctest_var = 42; doctest_var^2 

Test ran for 1.23 s 

""" 

out(self._failure_header(test, example, 'Warning, slow doctest:') + 

'Test ran for %.2f s\n' % example.walltime) 

 

def report_unexpected_exception(self, out, test, example, exc_info): 

r""" 

Called when a doctest raises an exception that's not matched by the expected output. 

 

If debugging has been turned on, starts an interactive debugger. 

 

INPUT: 

 

- ``out`` -- a function for printing 

 

- ``test`` -- a :class:`doctest.DocTest` instance 

 

- ``example`` -- a :class:`doctest.Example` instance in ``test`` 

 

- ``exc_info`` -- the result of ``sys.exc_info()`` 

 

OUTPUT: 

 

- prints a report to ``out`` 

 

- if in debugging mode, starts PDB with the given traceback 

 

EXAMPLES:: 

 

sage: import os 

sage: os.environ['SAGE_PEXPECT_LOG'] = "1" 

sage: sage0.quit() 

sage: _ = sage0.eval("import doctest, sys, os, multiprocessing, subprocess") 

sage: _ = sage0.eval("from sage.doctest.parsing import SageOutputChecker") 

sage: _ = sage0.eval("import sage.doctest.forker as sdf") 

sage: _ = sage0.eval("from sage.doctest.control import DocTestDefaults") 

sage: _ = sage0.eval("DD = DocTestDefaults(debug=True)") 

sage: _ = sage0.eval("ex = doctest.Example('E = EllipticCurve([0,0]); E', 'A singular Elliptic Curve')") 

sage: _ = sage0.eval("DT = doctest.DocTest([ex], globals(), 'singular_curve', None, 0, None)") 

sage: _ = sage0.eval("DTR = sdf.SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS)") 

sage: old_prompt = sage0._prompt 

sage: sage0._prompt = r"\(Pdb\) " 

sage: sage0.eval("DTR.run(DT, clear_globs=False)") # indirect doctest 

'... ArithmeticError("invariants " + str(ainvs) + " define a singular curve")' 

sage: sage0.eval("l") 

'...if self.discriminant() == 0:...raise ArithmeticError...' 

sage: sage0.eval("u") 

'...EllipticCurve_field.__init__(self, K, ainvs)' 

sage: sage0.eval("p ainvs") 

'(0, 0, 0, 0, 0)' 

sage: sage0._prompt = old_prompt 

sage: sage0.eval("quit") 

'TestResults(failed=1, attempted=1)' 

""" 

if not self.options.initial or self.no_failure_yet: 

self.no_failure_yet = False 

returnval = doctest.DocTestRunner.report_unexpected_exception(self, out, test, example, exc_info) 

if self.options.debug: 

self._fakeout.stop_spoofing() 

restore_tcpgrp = None 

try: 

if os.isatty(0): 

# In order to read from the terminal, we need 

# to make the current process group the 

# foreground group. 

restore_tcpgrp = os.tcgetpgrp(0) 

signal.signal(signal.SIGTTIN, signal.SIG_IGN) 

signal.signal(signal.SIGTTOU, signal.SIG_IGN) 

os.tcsetpgrp(0, os.getpgrp()) 

 

exc_type, exc_val, exc_tb = exc_info 

if exc_tb is None: 

raise RuntimeError( 

"could not start the debugger for an unexpected " 

"exception, probably due to an unhandled error " 

"in a C extension module") 

self.debugger.reset() 

self.debugger.interaction(None, exc_tb) 

except KeyboardInterrupt: 

# Assume this is a *real* interrupt. We need to 

# escalate this to the master docbuilding process. 

if not self.options.serial: 

os.kill(os.getppid(), signal.SIGINT) 

raise 

finally: 

# Restore the foreground process group. 

if restore_tcpgrp is not None: 

os.tcsetpgrp(0, restore_tcpgrp) 

signal.signal(signal.SIGTTIN, signal.SIG_DFL) 

signal.signal(signal.SIGTTOU, signal.SIG_DFL) 

self._fakeout.start_spoofing() 

return returnval 

 

def update_results(self, D): 

""" 

When returning results we pick out the results of interest 

since many attributes are not pickleable. 

 

INPUT: 

 

- ``D`` -- a dictionary to update with cputime and walltime 

 

OUTPUT: 

 

- the number of failures (or False if there is no failure attribute) 

 

EXAMPLES:: 

 

sage: from sage.doctest.parsing import SageOutputChecker 

sage: from sage.doctest.forker import SageDocTestRunner 

sage: from sage.doctest.sources import FileDocTestSource, DictAsObject 

sage: from sage.doctest.control import DocTestDefaults; DD = DocTestDefaults() 

sage: from sage.env import SAGE_SRC 

sage: import doctest, sys, os 

sage: DTR = SageDocTestRunner(SageOutputChecker(), verbose=False, sage_options=DD, optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS) 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','forker.py') 

sage: FDS = FileDocTestSource(filename,DD) 

sage: doctests, extras = FDS.create_doctests(globals()) 

sage: from sage.doctest.util import Timer 

sage: T = Timer().start() 

sage: DTR.run(doctests[0]) 

TestResults(failed=0, attempted=4) 

sage: T.stop().annotate(DTR) 

sage: D = DictAsObject({'cputime':[],'walltime':[],'err':None}) 

sage: DTR.update_results(D) 

0 

sage: sorted(list(D.items())) 

[('cputime', [...]), ('err', None), ('failures', 0), ('walltime', [...])] 

""" 

for key in ["cputime","walltime"]: 

if key not in D: 

D[key] = [] 

if hasattr(self, key): 

D[key].append(self.__dict__[key]) 

if hasattr(self, 'failures'): 

D['failures'] = self.failures 

return self.failures 

else: 

return False 

 

def dummy_handler(sig, frame): 

""" 

Dummy signal handler for SIGCHLD (just to ensure the signal 

isn't ignored). 

 

TESTS:: 

 

sage: import signal 

sage: from sage.doctest.forker import dummy_handler 

sage: _ = signal.signal(signal.SIGUSR1, dummy_handler) 

sage: os.kill(os.getpid(), signal.SIGUSR1) 

sage: signal.signal(signal.SIGUSR1, signal.SIG_DFL) 

<function dummy_handler at ...> 

""" 

pass 

 

 

class DocTestDispatcher(SageObject): 

""" 

Creates parallel :class:`DocTestWorker` processes and dispatches 

doctesting tasks. 

""" 

def __init__(self, controller): 

""" 

INPUT: 

 

- ``controller`` -- a :class:`sage.doctest.control.DocTestController` instance 

 

EXAMPLES:: 

 

sage: from sage.doctest.control import DocTestController, DocTestDefaults 

sage: from sage.doctest.forker import DocTestDispatcher 

sage: DocTestDispatcher(DocTestController(DocTestDefaults(), [])) 

<sage.doctest.forker.DocTestDispatcher object at ...> 

""" 

self.controller = controller 

init_sage() 

 

def serial_dispatch(self): 

""" 

Run the doctests from the controller's specified sources in series. 

 

There is no graceful handling for signals, no possibility of 

interrupting tests and no timeout. 

 

EXAMPLES:: 

 

sage: from sage.doctest.control import DocTestController, DocTestDefaults 

sage: from sage.doctest.forker import DocTestDispatcher 

sage: from sage.doctest.reporting import DocTestReporter 

sage: from sage.doctest.util import Timer 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: homset = os.path.join(SAGE_SRC, 'sage', 'rings', 'homset.py') 

sage: ideal = os.path.join(SAGE_SRC, 'sage', 'rings', 'ideal.py') 

sage: DC = DocTestController(DocTestDefaults(), [homset, ideal]) 

sage: DC.expand_files_into_sources() 

sage: DD = DocTestDispatcher(DC) 

sage: DR = DocTestReporter(DC) 

sage: DC.reporter = DR 

sage: DC.dispatcher = DD 

sage: DC.timer = Timer().start() 

sage: DD.serial_dispatch() 

sage -t .../rings/homset.py 

[... tests, ... s] 

sage -t .../rings/ideal.py 

[... tests, ... s] 

""" 

for source in self.controller.sources: 

heading = self.controller.reporter.report_head(source) 

if not self.controller.options.only_errors: 

self.controller.log(heading) 

 

with tempfile.TemporaryFile() as outtmpfile: 

result = DocTestTask(source)(self.controller.options, 

outtmpfile, self.controller.logger, 

coverage=self.controller.coverage) 

outtmpfile.seek(0) 

output = bytes_to_str(outtmpfile.read()) 

 

self.controller.reporter.report(source, False, 0, result, output) 

if self.controller.options.exitfirst and result[1].failures: 

break 

 

def parallel_dispatch(self): 

r""" 

Run the doctests from the controller's specified sources in parallel. 

 

This creates :class:`DocTestWorker` subprocesses, while the master 

process checks for timeouts and collects and displays the results. 

 

EXAMPLES:: 

 

sage: from sage.doctest.control import DocTestController, DocTestDefaults 

sage: from sage.doctest.forker import DocTestDispatcher 

sage: from sage.doctest.reporting import DocTestReporter 

sage: from sage.doctest.util import Timer 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: crem = os.path.join(SAGE_SRC, 'sage', 'databases', 'cremona.py') 

sage: bigo = os.path.join(SAGE_SRC, 'sage', 'rings', 'big_oh.py') 

sage: DC = DocTestController(DocTestDefaults(), [crem, bigo]) 

sage: DC.expand_files_into_sources() 

sage: DD = DocTestDispatcher(DC) 

sage: DR = DocTestReporter(DC) 

sage: DC.reporter = DR 

sage: DC.dispatcher = DD 

sage: DC.timer = Timer().start() 

sage: DD.parallel_dispatch() 

sage -t .../databases/cremona.py 

[... tests, ... s] 

sage -t .../rings/big_oh.py 

[... tests, ... s] 

 

If the ``exitfirst=True`` option is given, the results for a failing 

module will be immediately printed and any other ongoing tests 

canceled:: 

 

sage: test1 = os.path.join(SAGE_TMP, 'test1.py') 

sage: test2 = os.path.join(SAGE_TMP, 'test2.py') 

sage: with open(test1, 'w') as f: 

....: _ = f.write("'''\nsage: import time; time.sleep(60)\n'''") 

sage: with open(test2, 'w') as f: 

....: _ = f.write("'''\nsage: True\nFalse\n'''") 

sage: DC = DocTestController(DocTestDefaults(exitfirst=True, 

....: nthreads=2), 

....: [test1, test2]) 

sage: DC.expand_files_into_sources() 

sage: DD = DocTestDispatcher(DC) 

sage: DR = DocTestReporter(DC) 

sage: DC.reporter = DR 

sage: DC.dispatcher = DD 

sage: DC.timer = Timer().start() 

sage: DD.parallel_dispatch() 

sage -t .../test2.py 

********************************************************************** 

File ".../test2.py", line 2, in test2 

Failed example: 

True 

Expected: 

False 

Got: 

True 

********************************************************************** 

1 item had failures: 

1 of 1 in test2 

[1 test, 1 failure, ... s] 

Killing test .../test1.py 

""" 

opt = self.controller.options 

source_iter = iter(self.controller.sources) 

 

# If timeout was 0, simply set a very long time 

if opt.timeout <= 0: 

opt.timeout = 2**60 

# Timeout we give a process to die (after it received a SIGQUIT 

# signal). If it doesn't exit by itself in this many seconds, we 

# SIGKILL it. This is 5% of doctest timeout, with a maximum of 

# 10 minutes and a minimum of 60 seconds. 

die_timeout = opt.timeout * 0.05 

if die_timeout > 600: 

die_timeout = 600 

elif die_timeout < 60: 

die_timeout = 60 

 

# List of alive DocTestWorkers (child processes). Workers which 

# are done but whose messages have not been read are also 

# considered alive. 

workers = [] 

 

# List of DocTestWorkers which have finished running but 

# whose results have not been reported yet. 

finished = [] 

 

# If exitfirst is set and we got a failure. 

abort_now = False 

 

# One particular worker that we are "following": we report the 

# messages while it's running. For other workers, we report the 

# messages if there is no followed worker. 

follow = None 

 

# Install signal handler for SIGCHLD 

signal.signal(signal.SIGCHLD, dummy_handler) 

 

# Logger 

log = self.controller.log 

 

from cysignals.pselect import PSelecter 

try: 

# Block SIGCHLD and SIGINT except during the pselect() call 

with PSelecter([signal.SIGCHLD, signal.SIGINT]) as sel: 

# Function to execute in the child process which exits 

# this "with" statement (which restores the signal mask) 

# and resets to SIGCHLD handler to default. 

# Since multiprocessing.Process is implemented using 

# fork(), signals would otherwise remain blocked in the 

# child process. 

def sel_exit(): 

signal.signal(signal.SIGCHLD, signal.SIG_DFL) 

sel.__exit__(None, None, None) 

 

while True: 

# To avoid calling time.time() all the time while 

# checking for timeouts, we call it here, once per 

# loop. It's not a problem if this isn't very 

# precise, doctest timeouts don't need millisecond 

# precision. 

now = time.time() 

 

# If there were any substantial changes in the state 

# (new worker started or finished worker reported), 

# restart this while loop instead of calling pselect(). 

# This ensures internal consistency and a reasonably 

# accurate value for "now". 

restart = False 

 

# Process all workers. Check for timeouts on active 

# workers and move finished/crashed workers to the 

# "finished" list. 

# Create a new list "new_workers" containing the active 

# workers (to avoid updating "workers" in place). 

new_workers = [] 

for w in workers: 

if w.rmessages is not None or w.is_alive(): 

if now >= w.deadline: 

# Timeout => (try to) kill the process 

# group (which normally includes 

# grandchildren) and close the message 

# pipe. 

# We don't report the timeout yet, we wait 

# until the process has actually died. 

w.kill() 

w.deadline = now + die_timeout 

if not w.is_alive(): 

# Worker is done but we haven't read all 

# messages (possibly a grandchild still 

# has the messages pipe open). 

# Adjust deadline to read all messages: 

newdeadline = now + die_timeout 

if w.deadline > newdeadline: 

w.deadline = newdeadline 

new_workers.append(w) 

else: 

# Save the result and output of the worker 

# and close the associated file descriptors. 

# It is important to do this now. If we 

# would leave them open until we call 

# report(), parallel testing can easily fail 

# with a "Too many open files" error. 

w.save_result_output() 

finished.append(w) 

workers = new_workers 

 

# Similarly, process finished workers. 

new_finished = [] 

for w in finished: 

if opt.exitfirst and w.result[1].failures: 

abort_now = True 

elif follow is not None and follow is not w: 

# We are following a different worker, so 

# we cannot report now. 

new_finished.append(w) 

continue 

 

# Report the completion of this worker 

log(w.messages, end="") 

self.controller.reporter.report( 

w.source, 

w.killed, 

w.exitcode, 

w.result, 

w.output, 

pid=w.pid) 

 

restart = True 

follow = None 

 

finished = new_finished 

 

if abort_now: 

break 

 

# Start new workers if possible 

while source_iter is not None and len(workers) < opt.nthreads: 

try: 

source = next(source_iter) 

except StopIteration: 

source_iter = None 

else: 

# Start a new worker. 

w = DocTestWorker( 

source, opt, 

funclist=[sel_exit], 

coverage=self.controller.coverage) 

heading = self.controller.reporter.report_head(w.source) 

if not self.controller.options.only_errors: 

w.messages = heading + "\n" 

# Store length of heading to detect if the 

# worker has something interesting to report. 

w.heading_len = len(w.messages) 

w.start() # This might take some time 

w.deadline = time.time() + opt.timeout 

workers.append(w) 

restart = True 

 

# Recompute state if needed 

if restart: 

continue 

 

# We are finished if there are no DocTestWorkers left 

if len(workers) == 0: 

# If there are no active workers, we should have 

# reported all finished workers. 

assert len(finished) == 0 

break 

 

# The master pselect() call 

rlist = [w.rmessages for w in workers if w.rmessages is not None] 

tmout = min(w.deadline for w in workers) - now 

if tmout > 5: # Wait at most 5 seconds 

tmout = 5 

rlist, _, _, _ = sel.pselect(rlist, timeout=tmout) 

 

# Read messages 

for w in workers: 

if w.rmessages is not None and w.rmessages in rlist: 

w.read_messages() 

 

# Find a worker to follow: if there is only one worker, 

# always follow it. Otherwise, take the worker with 

# the earliest deadline of all workers whose 

# messages are more than just the heading. 

if follow is None: 

if len(workers) == 1: 

follow = workers[0] 

else: 

for w in workers: 

if len(w.messages) > w.heading_len: 

if follow is None or w.deadline < follow.deadline: 

follow = w 

 

# Write messages of followed worker 

if follow is not None: 

log(follow.messages, end="") 

follow.messages = "" 

finally: 

# Restore SIGCHLD handler (which is to ignore the signal) 

signal.signal(signal.SIGCHLD, signal.SIG_DFL) 

 

# Kill all remaining workers (in case we got interrupted) 

for w in workers: 

if w.kill(): 

log("Killing test %s" % w.source.printpath) 

# Fork a child process with the specific purpose of 

# killing the remaining workers. 

if len(workers) > 0 and os.fork() == 0: 

# Block these signals 

with PSelecter([signal.SIGQUIT, signal.SIGINT]): 

try: 

from time import sleep 

sleep(die_timeout) 

for w in workers: 

w.kill() 

finally: 

os._exit(0) 

 

# Hack to ensure multiprocessing leaves these processes 

# alone (in particular, it doesn't wait for them when we 

# exit). 

multiprocessing.current_process()._children = set() 

 

def dispatch(self): 

""" 

Run the doctests for the controller's specified sources, 

by calling :meth:`parallel_dispatch` or :meth:`serial_dispatch` 

according to the ``--serial`` option. 

 

EXAMPLES:: 

 

sage: from sage.doctest.control import DocTestController, DocTestDefaults 

sage: from sage.doctest.forker import DocTestDispatcher 

sage: from sage.doctest.reporting import DocTestReporter 

sage: from sage.doctest.util import Timer 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: freehom = os.path.join(SAGE_SRC, 'sage', 'modules', 'free_module_homspace.py') 

sage: bigo = os.path.join(SAGE_SRC, 'sage', 'rings', 'big_oh.py') 

sage: DC = DocTestController(DocTestDefaults(), [freehom, bigo]) 

sage: DC.expand_files_into_sources() 

sage: DD = DocTestDispatcher(DC) 

sage: DR = DocTestReporter(DC) 

sage: DC.reporter = DR 

sage: DC.dispatcher = DD 

sage: DC.timer = Timer().start() 

sage: DD.dispatch() 

sage -t .../sage/modules/free_module_homspace.py 

[... tests, ... s] 

sage -t .../sage/rings/big_oh.py 

[... tests, ... s] 

""" 

if self.controller.options.serial: 

self.serial_dispatch() 

else: 

self.parallel_dispatch() 

 

self.controller.collect_coverage() 

 

 

class DocTestWorker(multiprocessing.Process): 

""" 

The DocTestWorker process runs one :class:`DocTestTask` for a given 

source. It returns messages about doctest failures (or all tests if 

verbose doctesting) though a pipe and returns results through a 

``multiprocessing.Queue`` instance (both these are created in the 

:meth:`start` method). 

 

It runs the task in its own process-group, such that killing the 

process group kills this process together with its child processes. 

 

The class has additional methods and attributes for bookkeeping 

by the master process. Except in :meth:`run`, nothing from this 

class should be accessed by the child process. 

 

INPUT: 

 

- ``source`` -- a :class:`DocTestSource` instance 

 

- ``options`` -- an object representing doctest options. 

 

- ``funclist`` -- a list of callables to be called at the start of 

the child process. 

 

- ``coverage`` -- an optional coverage controller 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import DocTestWorker 

sage: from sage.doctest.control import DocTestController 

sage: from sage.doctest.reporting import DocTestReporter 

sage: from sage.env import SAGE_SRC 

sage: filename = os.path.join(SAGE_SRC, 'sage', 'doctest', 'util.py') 

sage: W = DocTestWorker.for_file(filename) 

sage: controller = DocTestController(W.options, filename) 

sage: reporter = DocTestReporter(controller) 

sage: W.start() 

sage: W.join() # Wait for worker to finish 

sage: result = W.result_queue.get() 

sage: reporter.report(W.source, False, W.exitcode, result, "") 

[... tests, ... s] 

""" 

def __init__(self, source, options, funclist=[], coverage=None): 

""" 

Initialization. 

 

TESTS:: 

 

sage: run_doctests(sage.rings.big_oh) # indirect doctest 

Running doctests with ID ... 

Doctesting 1 file. 

sage -t .../sage/rings/big_oh.py 

[... tests, ... s] 

---------------------------------------------------------------------- 

All tests passed! 

---------------------------------------------------------------------- 

Total time for all tests: ... seconds 

cpu time: ... seconds 

cumulative wall time: ... seconds 

""" 

multiprocessing.Process.__init__(self) 

 

self.source = source 

self.options = options 

self.funclist = funclist 

 

# Open pipe for messages. These are raw file descriptors, 

# not Python file objects! 

self.rmessages, self.wmessages = os.pipe() 

 

# Create Queue for the result. Since we're running only one 

# doctest, this "queue" will contain only 1 element. 

self.result_queue = multiprocessing.Queue(1) 

 

# Temporary file for stdout/stderr of the child process. 

# Normally, this isn't used in the master process except to 

# debug timeouts/crashes. 

self.outtmpfile = tempfile.TemporaryFile() 

 

# Create string for the master process to store the messages 

# (usually these are the doctest failures) of the child. 

# These messages are read through the pipe created above. 

self.messages = "" 

 

# Has this worker been killed (because of a time out)? 

self.killed = False 

 

self.coverage = coverage 

if coverage is not None: 

coverage.data_suffix = source.basename 

 

@classmethod 

def for_file(cls, filename, funclist=[], **options): 

""" 

Shortcut constructor used for primarily for testing. 

 

Constructs the :class:`DocTestDefaults` and :class:`FileDocTestSource`` 

objects for a single filename. 

 

INPUT: 

 

- ``filename`` -- the path to a file to test 

 

- ``funclist`` -- same as the ``funclist`` argument to the main 

:class:DocTestTask: constructor 

 

- ``**options`` -- any overrides to the default options in 

:class:`DocTestDefaults` 

""" 

 

from .control import DocTestDefaults 

options = DocTestDefaults(**options) 

source = FileDocTestSource(filename, options) 

return cls(source, options, funclist=funclist) 

 

def run(self): 

""" 

Runs the :class:`DocTestTask` under its own PGID. 

 

TESTS:: 

 

sage: run_doctests(sage.symbolic.units) # indirect doctest 

Running doctests with ID ... 

Doctesting 1 file. 

sage -t .../sage/symbolic/units.py 

[... tests, ... s] 

---------------------------------------------------------------------- 

All tests passed! 

---------------------------------------------------------------------- 

Total time for all tests: ... seconds 

cpu time: ... seconds 

cumulative wall time: ... seconds 

""" 

os.setpgid(os.getpid(), os.getpid()) 

 

# Run functions 

for f in self.funclist: 

f() 

 

# Write one byte to the pipe to signal to the master process 

# that we have started properly. 

os.write(self.wmessages, b"X") 

 

task = DocTestTask(self.source) 

 

# Ensure the Python stdin is the actual stdin 

# (multiprocessing redirects this). 

# We will do a more proper redirect of stdin in SageSpoofInOut. 

try: 

sys.stdin = os.fdopen(0, "r") 

except OSError: 

# We failed to open stdin for reading, this might happen 

# for example when running under "nohup" (Trac #14307). 

# Simply redirect stdin from /dev/null and try again. 

with open(os.devnull) as f: 

os.dup2(f.fileno(), 0) 

sys.stdin = os.fdopen(0, "r") 

 

# Close the reading end of the pipe (only the master should 

# read from the pipe) and open the writing end. 

os.close(self.rmessages) 

msgpipe = os.fdopen(self.wmessages, "w") 

try: 

task(self.options, self.outtmpfile, msgpipe, self.result_queue, 

self.coverage) 

finally: 

msgpipe.close() 

# Note: This closes the tempfile in the child process, but in the 

# parent process self.outtmpfile will not be closed yet, and can 

# still be accessed in save_result_output 

if hasattr(self.outtmpfile, 'delete'): 

# On some platforms (notably Cygwin) tempfile.TemporaryFile is 

# actually replaced by tempfile.NamedTemporaryFile with 

# delete=True for this file 

# This means that we end up with two NamedTemporaryFile 

# instances--one on the parent process and one on the worker 

# process. Since NamedTemporaryFile automatically unlinks the 

# file when it is closed, this can lead to an unhandled 

# exception in the parent process if the child process closes 

# this file first. See https://trac.sagemath.org/ticket/25107#comment:14 

# for more details. 

self.outtmpfile.delete = False 

 

self.outtmpfile.close() 

 

def start(self): 

""" 

Start the worker and close the writing end of the message pipe. 

 

TESTS:: 

 

sage: from sage.doctest.forker import DocTestWorker 

sage: from sage.env import SAGE_SRC 

sage: filename = os.path.join(SAGE_SRC, 'sage', 'doctest', 

....: 'util.py') 

sage: W = DocTestWorker.for_file(filename) 

sage: W.start() 

sage: try: 

....: os.fstat(W.wmessages) 

....: except OSError: 

....: print("Write end of pipe successfully closed") 

Write end of pipe successfully closed 

sage: W.join() # Wait for worker to finish 

""" 

super(DocTestWorker, self).start() 

 

# Close the writing end of the pipe (only the child should 

# write to the pipe). 

os.close(self.wmessages) 

 

# Read one byte from the pipe as a sign that the child process 

# has properly started (to avoid race conditions). In particular, 

# it will have its process group changed. 

os.read(self.rmessages, 1) 

 

def read_messages(self): 

""" 

In the master process, read from the pipe and store the data 

read in the ``messages`` attribute. 

 

.. NOTE:: 

 

This function may need to be called multiple times in 

order to read all of the messages. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import DocTestWorker 

sage: from sage.env import SAGE_SRC 

sage: filename = os.path.join(SAGE_SRC, 'sage', 'doctest', 

....: 'util.py') 

sage: W = DocTestWorker.for_file(filename, verbose=True, 

....: nthreads=2) 

sage: W.start() 

sage: while W.rmessages is not None: 

....: W.read_messages() 

sage: W.join() 

sage: len(W.messages) > 0 

True 

""" 

# It's absolutely important to execute only one read() system 

# call, more might block. Assuming that we used pselect() 

# correctly, one read() will not block. 

if self.rmessages is not None: 

s = os.read(self.rmessages, 4096) 

self.messages += bytes_to_str(s) 

if len(s) == 0: # EOF 

os.close(self.rmessages) 

self.rmessages = None 

 

def save_result_output(self): 

""" 

Annotate ``self`` with ``self.result`` (the result read through 

the ``result_queue`` and with ``self.output``, the complete 

contents of ``self.outtmpfile``. Then close the Queue and 

``self.outtmpfile``. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import DocTestWorker 

sage: from sage.env import SAGE_SRC 

sage: filename = os.path.join(SAGE_SRC, 'sage', 'doctest', 

....: 'util.py') 

sage: W = DocTestWorker.for_file(filename) 

sage: W.start() 

sage: W.join() 

sage: W.save_result_output() 

sage: sorted(W.result[1].keys()) 

['cputime', 'err', 'failures', 'optionals', 'walltime'] 

sage: len(W.output) > 0 

True 

 

.. NOTE:: 

 

This method is called from the parent process, not from the 

subprocess. 

""" 

from six.moves.queue import Empty 

try: 

self.result = self.result_queue.get(block=False) 

except Empty: 

self.result = (0, DictAsObject(dict(err='noresult'))) 

del self.result_queue 

 

self.outtmpfile.seek(0) 

self.output = bytes_to_str(self.outtmpfile.read()) 

del self.outtmpfile 

 

def kill(self): 

""" 

Kill this worker. Returns ``True`` if the signal(s) are sent 

successfully or ``False`` if the worker process no longer exists. 

 

This method is only called if there is something wrong with the 

worker. Under normal circumstances, the worker is supposed to 

exit by itself after finishing. 

 

The first time this is called, use ``SIGQUIT``. This will trigger 

the cysignals ``SIGQUIT`` handler and try to print an enhanced 

traceback. 

 

Subsequent times, use ``SIGKILL``. Also close the message pipe 

if it was still open. 

 

EXAMPLES:: 

 

sage: import time 

sage: from sage.doctest.forker import DocTestWorker 

sage: from sage.env import SAGE_SRC 

sage: filename = os.path.join(SAGE_SRC, 'sage', 'doctest', 

....: 'tests', '99seconds.rst') 

 

We set up the worker to start by blocking ``SIGQUIT``, such that 

killing will fail initially:: 

 

sage: from cysignals.pselect import PSelecter 

sage: import signal 

sage: def block_hup(): 

....: # We never __exit__() 

....: PSelecter([signal.SIGQUIT]).__enter__() 

sage: W = DocTestWorker.for_file(filename, [block_hup]) 

sage: W.start() 

sage: W.killed 

False 

sage: W.kill() 

True 

sage: W.killed 

True 

sage: time.sleep(0.2) # Worker doesn't die 

sage: W.kill() # Worker dies now 

True 

sage: time.sleep(1) 

sage: W.is_alive() 

False 

""" 

 

if self.rmessages is not None: 

os.close(self.rmessages) 

self.rmessages = None 

 

try: 

if not self.killed: 

self.killed = True 

os.killpg(self.pid, signal.SIGQUIT) 

else: 

os.killpg(self.pid, signal.SIGKILL) 

except OSError as exc: 

# Handle a race condition where the process has exited on 

# its own by the time we get here, and ESRCH is returned 

# indicating no processes in the specified process group 

if exc.errno != errno.ESRCH: 

raise 

 

return False 

 

return True 

 

 

class DocTestTask(object): 

""" 

This class encapsulates the tests from a single source. 

 

This class does not insulate from problems in the source 

(e.g. entering an infinite loop or causing a segfault), that has to 

be dealt with at a higher level. 

 

INPUT: 

 

- ``source`` -- a :class:`sage.doctest.sources.DocTestSource` instance. 

 

- ``verbose`` -- boolean, controls reporting of progress by :class:`doctest.DocTestRunner`. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import DocTestTask 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults, DocTestController 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','sources.py') 

sage: DD = DocTestDefaults() 

sage: FDS = FileDocTestSource(filename,DD) 

sage: DTT = DocTestTask(FDS) 

sage: DC = DocTestController(DD,[filename]) 

sage: ntests, results = DTT(options=DD) 

sage: ntests >= 300 or ntests 

True 

sage: sorted(results.keys()) 

['cputime', 'err', 'failures', 'optionals', 'walltime'] 

""" 

def __init__(self, source): 

""" 

Initialization. 

 

TESTS:: 

 

sage: from sage.doctest.forker import DocTestTask 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','sources.py') 

sage: FDS = FileDocTestSource(filename,DocTestDefaults()) 

sage: DocTestTask(FDS) 

<sage.doctest.forker.DocTestTask object at ...> 

""" 

self.source = source 

 

def __call__(self, options, outtmpfile=None, msgfile=None, 

result_queue=None, coverage=None): 

""" 

Calling the task does the actual work of running the doctests. 

 

INPUT: 

 

- ``options`` -- an object representing doctest options. 

 

- ``outtmpfile`` -- a seekable file that's used by the doctest 

runner to redirect stdout and stderr of the doctests. 

 

- ``msgfile`` -- a file or pipe to send doctest messages about 

doctest failures (or all tests in verbose mode). 

 

- ``result_queue`` -- an instance of :class:`multiprocessing.Queue` 

to store the doctest result. For testing, this can also be None. 

 

- ``coverage`` -- an instance of :class:`coverage.Coverage` for 

tracing test coverage, or ``None`` for no coverage. 

 

OUTPUT: 

 

- ``(doctests, result_dict)`` where ``doctests`` is the number of 

doctests and ``result_dict`` is a dictionary annotated with 

timings and error information. 

 

- Also put ``(doctests, result_dict)`` onto the ``result_queue`` 

if the latter isn't None. 

 

EXAMPLES:: 

 

sage: from sage.doctest.forker import DocTestTask 

sage: from sage.doctest.sources import FileDocTestSource 

sage: from sage.doctest.control import DocTestDefaults, DocTestController 

sage: from sage.env import SAGE_SRC 

sage: import os 

sage: filename = os.path.join(SAGE_SRC,'sage','doctest','parsing.py') 

sage: DD = DocTestDefaults() 

sage: FDS = FileDocTestSource(filename,DD) 

sage: DTT = DocTestTask(FDS) 

sage: DC = DocTestController(DD, [filename]) 

sage: ntests, runner = DTT(options=DD) 

sage: runner.failures 

0 

sage: ntests >= 200 or ntests 

True 

""" 

result = None 

try: 

runner = SageDocTestRunner( 

SageOutputChecker(), 

verbose=options.verbose, 

outtmpfile=outtmpfile, 

msgfile=msgfile, 

sage_options=options, 

optionflags=doctest.NORMALIZE_WHITESPACE|doctest.ELLIPSIS, 

coverage=coverage) 

runner.basename = self.source.basename 

runner.filename = self.source.path 

N = options.file_iterations 

results = DictAsObject(dict(walltime=[],cputime=[],err=None)) 

 

# multiprocessing.Process instances don't run exit 

# functions, so we run the functions added by doctests 

# when exiting this context. 

with restore_atexit(run=True): 

for it in range(N): 

doctests, extras = self._run(runner, options, results) 

runner.summarize(options.verbose) 

if runner.update_results(results): 

break 

 

if extras['tab']: 

results.err = 'tab' 

results.tab_linenos = extras['tab'] 

if extras['line_number']: 

results.err = 'line_number' 

results.optionals = extras['optionals'] 

# We subtract 1 to remove the sig_on_count() tests 

result = (sum(max(0,len(test.examples) - 1) for test in doctests), 

results) 

except BaseException: 

exc_info = sys.exc_info() 

tb = "".join(traceback.format_exception(*exc_info)) 

result = (0, DictAsObject(dict(err=exc_info[0], tb=tb))) 

 

if result_queue is not None: 

result_queue.put(result, False) 

 

if coverage: 

coverage.save() 

 

return result 

 

def _run(self, runner, options, results): 

""" 

Actually run the doctests with the right set of globals 

""" 

if self.source.basename.startswith("sagenb."): 

import sage.all_notebook as sage_all 

else: 

import sage.all_cmdline as sage_all 

dict_all = sage_all.__dict__ 

# Remove '__package__' item from the globals since it is not 

# always in the globals in an actual Sage session. 

dict_all.pop('__package__', None) 

sage_namespace = RecordingDict(dict_all) 

sage_namespace['__name__'] = '__main__' 

doctests, extras = self.source.create_doctests(sage_namespace) 

timer = Timer().start() 

 

for test in doctests: 

result = runner.run(test) 

if options.exitfirst and result.failed: 

break 

 

timer.stop().annotate(runner) 

return doctests, extras