#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for Tablib."""

import unittest
import sys
import os

import tablib
from tablib.compat import markup, unicode


class TablibTestCase(unittest.TestCase):
    """Tablib test cases."""

    def setUp(self):
        """Create simple data set with headers."""

        global data, book

        data = tablib.Dataset()
        book = tablib.Databook()

        self.headers = ('first_name', 'last_name', 'gpa')
        self.john = ('John', 'Adams', 90)
        self.george = ('George', 'Washington', 67)
        self.tom = ('Thomas', 'Jefferson', 50)

        self.founders = tablib.Dataset(headers=self.headers, title='Founders')
        self.founders.append(self.john)
        self.founders.append(self.george)
        self.founders.append(self.tom)

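    # For reference, the founders fixture built above lays out as:
    #
    #   first_name | last_name  | gpa
    #   -----------+------------+----
    #   John       | Adams      | 90
    #   George     | Washington | 67
    #   Thomas     | Jefferson  | 50
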
    def tearDown(self):
        """Teardown."""
        pass

    def test_empty_append(self):
        """Verify append() correctly adds tuple with no headers."""
        new_row = (1, 2, 3)
        data.append(new_row)

        # Verify width/data
        self.assertTrue(data.width == len(new_row))
        self.assertTrue(data[0] == new_row)

    def test_empty_append_with_headers(self):
        """Verify append() correctly detects mismatch of number of
        headers and data.
        """
        data.headers = ['first', 'second']
        new_row = (1, 2, 3, 4)

        self.assertRaises(tablib.InvalidDimensions, data.append, new_row)

    def test_set_headers_with_incorrect_dimension(self):
        """Verify headers correctly detects mismatch of number of
        headers and data.
        """
        data.append(self.john)

        def set_header_callable():
            data.headers = ['first_name']

        self.assertRaises(tablib.InvalidDimensions, set_header_callable)

    def test_add_column(self):
        """Verify adding column works with/without headers."""
        data.append(['kenneth'])
        data.append(['bessie'])

        new_col = ['reitz', 'monke']
        data.append_col(new_col)

        self.assertEqual(data[0], ('kenneth', 'reitz'))
        self.assertEqual(data.width, 2)

        # With headers
        data.headers = ('fname', 'lname')
        new_col = [21, 22]
        data.append_col(new_col, header='age')

        self.assertEqual(data['age'], new_col)

    def test_add_column_no_data_no_headers(self):
        """Verify adding new column with no headers."""
        new_col = ('reitz', 'monke')
        data.append_col(new_col)

        self.assertEqual(data[0], tuple([new_col[0]]))
        self.assertEqual(data.width, 1)
        self.assertEqual(data.height, len(new_col))

    def test_add_column_with_header_ignored(self):
        """Verify append_col() ignores the header if data.headers has
        not previously been set.
        """
        new_col = ('reitz', 'monke')
        data.append_col(new_col, header='first_name')

        self.assertEqual(data[0], tuple([new_col[0]]))
        self.assertEqual(data.width, 1)
        self.assertEqual(data.height, len(new_col))
        self.assertEqual(data.headers, None)

    def test_add_column_with_header_and_headers_only_exist(self):
        """Verify append_col() with header correctly detects mismatch when
        headers exist but there is no existing row data.
        """
        data.headers = ['first_name']

        # No data yet, so appending a non-empty column must fail
        new_col = ('allen',)

        def append_col_callable():
            data.append_col(new_col, header='middle_name')

        self.assertRaises(tablib.InvalidDimensions, append_col_callable)

    def test_add_column_with_header_and_data_exists(self):
        """Verify append_col() works when headers and rows exist."""
        data.headers = self.headers
        data.append(self.john)

        new_col = [10]
        data.append_col(new_col, header='age')

        self.assertEqual(data.height, 1)
        self.assertEqual(data.width, len(self.john) + 1)
        self.assertEqual(data['age'], new_col)
        self.assertEqual(len(data.headers), len(self.headers) + 1)

    def test_add_callable_column(self):
        """Verify adding column with values specified as callable."""
        new_col = lambda x: x[0]
        self.founders.append_col(new_col, header='first_again')

    def test_header_slicing(self):
        """Verify slicing by headers."""
        self.assertEqual(self.founders['first_name'],
                         [self.john[0], self.george[0], self.tom[0]])
        self.assertEqual(self.founders['last_name'],
                         [self.john[1], self.george[1], self.tom[1]])
        self.assertEqual(self.founders['gpa'],
                         [self.john[2], self.george[2], self.tom[2]])

    def test_get_col(self):
        """Verify getting columns by index."""
        self.assertEqual(
            self.founders.get_col(list(self.headers).index('first_name')),
            [self.john[0], self.george[0], self.tom[0]])
        self.assertEqual(
            self.founders.get_col(list(self.headers).index('last_name')),
            [self.john[1], self.george[1], self.tom[1]])
        self.assertEqual(
            self.founders.get_col(list(self.headers).index('gpa')),
            [self.john[2], self.george[2], self.tom[2]])

    def test_data_slicing(self):
        """Verify slicing by data."""
        # Slice individual rows
        self.assertEqual(self.founders[0], self.john)
        self.assertEqual(self.founders[:1], [self.john])
        self.assertEqual(self.founders[1:2], [self.george])
        self.assertEqual(self.founders[-1], self.tom)
        self.assertEqual(self.founders[3:], [])

        # Slice multiple rows
        self.assertEqual(self.founders[:], [self.john, self.george, self.tom])
        self.assertEqual(self.founders[0:2], [self.john, self.george])
        self.assertEqual(self.founders[1:3], [self.george, self.tom])
        self.assertEqual(self.founders[2:], [self.tom])

    def test_delete(self):
        """Verify deleting from dataset works."""

        # Delete from front of object
        del self.founders[0]
        self.assertEqual(self.founders[:], [self.george, self.tom])

        # Verify dimensions, width should NOT change
        self.assertEqual(self.founders.height, 2)
        self.assertEqual(self.founders.width, 3)

        # Delete from back of object
        del self.founders[1]
        self.assertEqual(self.founders[:], [self.george])

        # Verify dimensions, width should NOT change
        self.assertEqual(self.founders.height, 1)
        self.assertEqual(self.founders.width, 3)

        # Delete from invalid index
        self.assertRaises(IndexError, self.founders.__delitem__, 3)

    def test_csv_export(self):
        """Verify exporting dataset object as CSV."""

        # Build up the csv string with headers first, followed by each row
        csv = ''
        for col in self.headers:
            csv += col + ','

        csv = csv.strip(',') + '\r\n'

        for founder in self.founders:
            for col in founder:
                csv += str(col) + ','
            csv = csv.strip(',') + '\r\n'

        self.assertEqual(csv, self.founders.csv)

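    # For the unmodified founders fixture, the CSV assembled above (and the
    # string returned by self.founders.csv) is:
    #
    #   first_name,last_name,gpa\r\n
    #   John,Adams,90\r\n
    #   George,Washington,67\r\n
    #   Thomas,Jefferson,50\r\n
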
    def test_tsv_export(self):
        """Verify exporting dataset object as TSV."""

        # Build up the tsv string with headers first, followed by each row
        tsv = ''
        for col in self.headers:
            tsv += col + '\t'

        tsv = tsv.strip('\t') + '\r\n'

        for founder in self.founders:
            for col in founder:
                tsv += str(col) + '\t'
            tsv = tsv.strip('\t') + '\r\n'

        self.assertEqual(tsv, self.founders.tsv)

    def test_html_export(self):
        """HTML export."""
        html = markup.page()
        html.table.open()
        html.thead.open()

        html.tr(markup.oneliner.th(self.founders.headers))

        html.thead.close()

        for founder in self.founders:
            html.tr(markup.oneliner.td(founder))

        html.table.close()
        html = str(html)

        self.assertEqual(html, self.founders.html)

    def test_html_export_none_value(self):
        """HTML export with a None value."""
        html = markup.page()
        html.table.open()
        html.thead.open()

        html.tr(markup.oneliner.th(['foo', '', 'bar']))

        html.thead.close()

        html.tr(markup.oneliner.td(['foo', '', 'bar']))

        html.table.close()
        html = str(html)

        headers = ['foo', None, 'bar']
        d = tablib.Dataset(['foo', None, 'bar'], headers=headers)

        self.assertEqual(html, d.html)

    def test_unicode_append(self):
        """Passes in unicode characters and exports."""
        new_row = (u'å', u'é')
        data.append(new_row)

        data.json
        data.yaml
        data.csv
        data.tsv
        data.xls
        data.xlsx
        data.ods
        data.html

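    # The format attributes above (json, yaml, csv, tsv, xls, xlsx, ods, html)
    # are read purely for their side effects: the test passes as long as no
    # export raises on unicode input. Later tests also assign to the
    # text-based ones (json, yaml, csv, tsv) to import data back in.
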
    def test_book_export_no_exceptions(self):
        """Test that various exports don't error out."""
        book = tablib.Databook()
        book.add_sheet(data)

        book.json
        book.yaml
        book.xls
        book.xlsx
        book.ods

    def test_json_import_set(self):
        """Generate and import JSON set serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers

        _json = data.json
        data.json = _json

        self.assertEqual(_json, data.json)

    def test_json_import_book(self):
        """Generate and import JSON book serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers
        book.add_sheet(data)

        _json = book.json
        book.json = _json

        self.assertEqual(_json, book.json)

    def test_yaml_import_set(self):
        """Generate and import YAML set serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers

        _yaml = data.yaml
        data.yaml = _yaml

        self.assertEqual(_yaml, data.yaml)

    def test_yaml_import_book(self):
        """Generate and import YAML book serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers
        book.add_sheet(data)

        _yaml = book.yaml
        book.yaml = _yaml

        self.assertEqual(_yaml, book.yaml)

    def test_csv_import_set(self):
        """Generate and import CSV set serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers

        _csv = data.csv
        data.csv = _csv

        self.assertEqual(_csv, data.csv)

    def test_csv_import_set_with_spaces(self):
        """Generate and import CSV set serialization when row values have
        spaces.
        """
        data.append(('Bill Gates', 'Microsoft'))
        data.append(('Steve Jobs', 'Apple'))
        data.headers = ('Name', 'Company')

        _csv = data.csv
        data.csv = _csv

        self.assertEqual(_csv, data.csv)

    def test_tsv_import_set(self):
        """Generate and import TSV set serialization."""
        data.append(self.john)
        data.append(self.george)
        data.headers = self.headers

        _tsv = data.tsv
        data.tsv = _tsv

        self.assertEqual(_tsv, data.tsv)

    def test_csv_format_detect(self):
        """Test CSV format detection."""
        _csv = (
            '1,2,3\n'
            '4,5,6\n'
            '7,8,9\n'
        )
        _bunk = (
            '¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
        )

        self.assertTrue(tablib.formats.csv.detect(_csv))
        self.assertFalse(tablib.formats.csv.detect(_bunk))

    def test_tsv_format_detect(self):
        """Test TSV format detection."""
        _tsv = (
            '1\t2\t3\n'
            '4\t5\t6\n'
            '7\t8\t9\n'
        )
        _bunk = (
            '¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
        )

        self.assertTrue(tablib.formats.tsv.detect(_tsv))
        self.assertFalse(tablib.formats.tsv.detect(_bunk))

    def test_json_format_detect(self):
        """Test JSON format detection."""
        _json = '[{"last_name": "Adams","age": 90,"first_name": "John"}]'
        _bunk = (
            '¡¡¡¡¡¡¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
        )

        self.assertTrue(tablib.formats.json.detect(_json))
        self.assertFalse(tablib.formats.json.detect(_bunk))

    def test_yaml_format_detect(self):
        """Test YAML format detection."""
        _yaml = '- {age: 90, first_name: John, last_name: Adams}'
        _tsv = 'foo\tbar'
        _bunk = (
            '¡¡¡¡¡¡---///\n\n\n¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'
        )

        self.assertTrue(tablib.formats.yaml.detect(_yaml))
        self.assertFalse(tablib.formats.yaml.detect(_bunk))
        self.assertFalse(tablib.formats.yaml.detect(_tsv))

    def test_auto_format_detect(self):
        """Test auto format detection."""
        _yaml = '- {age: 90, first_name: John, last_name: Adams}'
        _json = '[{"last_name": "Adams","age": 90,"first_name": "John"}]'
        _csv = '1,2,3\n4,5,6\n7,8,9\n'
        _tsv = '1\t2\t3\n4\t5\t6\n7\t8\t9\n'
        _bunk = '¡¡¡¡¡¡---///\n\n\n¡¡£™∞¢£§∞§¶•¶ª∞¶•ªº••ª–º§•†•§º¶•†¥ª–º•§ƒø¥¨©πƒø†ˆ¥ç©¨√øˆ¥≈†ƒ¥ç©ø¨çˆ¥ƒçø¶'

        self.assertEqual(tablib.detect(_yaml)[0], tablib.formats.yaml)
        self.assertEqual(tablib.detect(_csv)[0], tablib.formats.csv)
        self.assertEqual(tablib.detect(_tsv)[0], tablib.formats.tsv)
        self.assertEqual(tablib.detect(_json)[0], tablib.formats.json)
        self.assertEqual(tablib.detect(_bunk)[0], None)

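    # tablib.detect() is indexed with [0] above because the first element of
    # its return value is the matching format module (or None when no format
    # claims the input); the rest of the return value is not asserted here.
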
    def test_transpose(self):
        """Transpose a dataset."""
        transposed_founders = self.founders.transpose()
        first_row = transposed_founders[0]
        second_row = transposed_founders[1]

        self.assertEqual(transposed_founders.headers,
                         ["first_name", "John", "George", "Thomas"])
        self.assertEqual(first_row,
                         ("last_name", "Adams", "Washington", "Jefferson"))
        self.assertEqual(second_row,
                         ("gpa", 90, 67, 50))

    def test_row_stacking(self):
        """Row stacking."""
        to_join = tablib.Dataset(headers=self.founders.headers)

        for row in self.founders:
            to_join.append(row=row)

        row_stacked = self.founders.stack(to_join)

        for column in row_stacked.headers:
            original_data = self.founders[column]
            expected_data = original_data + original_data
            self.assertEqual(row_stacked[column], expected_data)

    def test_column_stacking(self):
        """Column stacking."""
        to_join = tablib.Dataset(headers=self.founders.headers)

        for row in self.founders:
            to_join.append(row=row)

        column_stacked = self.founders.stack_cols(to_join)

        for index, row in enumerate(column_stacked):
            original_data = self.founders[index]
            expected_data = original_data + original_data
            self.assertEqual(row, expected_data)

        self.assertEqual(column_stacked[0],
                         ("John", "Adams", 90, "John", "Adams", 90))

    def test_sorting(self):
        """Sort rows by a column."""
        sorted_data = self.founders.sort(col="first_name")

        self.assertEqual(sorted_data.title, 'Founders')

        first_row = sorted_data[0]
        second_row = sorted_data[2]
        third_row = sorted_data[1]

        expected_first = self.founders[1]
        expected_second = self.founders[2]
        expected_third = self.founders[0]

        self.assertEqual(first_row, expected_first)
        self.assertEqual(second_row, expected_second)
        self.assertEqual(third_row, expected_third)

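    # sort() returns a new Dataset (keeping the title); self.founders itself
    # stays in insertion order, which is why the expected rows above are taken
    # from its original indices.
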
    def test_wipe(self):
        """Purge a dataset."""
        new_row = (1, 2, 3)
        data.append(new_row)

        # Verify width/data
        self.assertTrue(data.width == len(new_row))
        self.assertTrue(data[0] == new_row)

        data.wipe()

        new_row = (1, 2, 3, 4)
        data.append(new_row)
        self.assertTrue(data.width == len(new_row))
        self.assertTrue(data[0] == new_row)

    def test_formatters(self):
        """Confirm formatters are being triggered."""
        def _formatter(cell_value):
            return str(cell_value).upper()

        self.founders.add_formatter('last_name', _formatter)

        for name in [r['last_name'] for r in self.founders.dict]:
            self.assertTrue(name.isupper())

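    # The formatter registered above for 'last_name' is observed through the
    # .dict export view; the test asserts only on that view, not on the rows
    # stored in the dataset.
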
    def test_unicode_csv(self):
        """Check if unicode in csv export doesn't raise."""
        data = tablib.Dataset()
        if sys.version_info[0] > 2:
            data.append(['\xfc', '\xfd'])
        else:
            exec("data.append([u'\xfc', u'\xfd'])")
        data.csv

    def test_csv_column_select(self):
        """Build up a CSV and test selecting a column."""
        data = tablib.Dataset()
        data.csv = self.founders.csv

        headers = data.headers
        self.assertTrue(isinstance(headers[0], unicode))

        orig_first_name = self.founders[self.headers[0]]
        csv_first_name = data[headers[0]]
        self.assertEqual(orig_first_name, csv_first_name)

    def test_csv_column_delete(self):
        """Build up a CSV and test deleting a column."""
        data = tablib.Dataset()
        data.csv = self.founders.csv

        target_header = data.headers[0]
        self.assertTrue(isinstance(target_header, unicode))

        del data[target_header]
        self.assertTrue(target_header not in data.headers)

    def test_csv_column_sort(self):
        """Build up a CSV and test sorting a column by name."""
        data = tablib.Dataset()
        data.csv = self.founders.csv

        orig_target_header = self.founders.headers[1]
        target_header = data.headers[1]

        self.founders.sort(orig_target_header)
        data.sort(target_header)

        self.assertEqual(self.founders[orig_target_header], data[target_header])

    def test_unicode_renders_markdown_table(self):
        # add another entry to test right field width for
        # integer
        self.founders.append(('Old', 'Man', 100500))

        self.assertEqual("""
first_name|last_name |gpa   
----------|----------|------
John      |Adams     |90    
George    |Washington|67    
Thomas    |Jefferson |50    
Old       |Man       |100500
""".strip(), unicode(self.founders))

    def test_databook_add_sheet_accepts_only_dataset_instances(self):
        class NotDataset(object):
            def append(self, item):
                pass

        dataset = NotDataset()
        dataset.append(self.john)

        self.assertRaises(tablib.InvalidDatasetType, book.add_sheet, dataset)

    def test_databook_add_sheet_accepts_dataset_subclasses(self):
        class DatasetSubclass(tablib.Dataset):
            pass

        # just checking if subclass of tablib.Dataset can be added to Databook
        dataset = DatasetSubclass()
        dataset.append(self.john)
        dataset.append(self.tom)

        try:
            book.add_sheet(dataset)
        except tablib.InvalidDatasetType:
            self.fail("Subclass of tablib.Dataset should be accepted by Databook.add_sheet")

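# A minimal usage sketch (separate from the test cases above): round-trip a
# Dataset through its CSV text using only the API the suite exercises. The
# dataset and names below are illustrative, not fixtures from the tests.
def _csv_roundtrip_sketch():
    people = tablib.Dataset(headers=('first_name', 'last_name', 'gpa'))
    people.append(('John', 'Adams', 90))

    text = people.csv     # reading .csv exports the dataset as CSV text
    people.csv = text     # assigning CSV text imports it back into the dataset

    return people.height  # one data row survives the round trip
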
if __name__ == '__main__':
    unittest.main()