csv_parser.py

# type: ignore
from typing import IO, AsyncGenerator, Optional

from core.base.parsers.base_parser import AsyncParser
from core.base.providers import (
    CompletionProvider,
    DatabaseProvider,
    IngestionConfig,
)


class CSVParser(AsyncParser[str | bytes]):
    """A parser for CSV data that yields one line of text per row."""

    def __init__(
        self,
        config: IngestionConfig,
        database_provider: DatabaseProvider,
        llm_provider: CompletionProvider,
    ):
        self.database_provider = database_provider
        self.llm_provider = llm_provider
        self.config = config

        import csv
        from io import StringIO

        self.csv = csv
        self.StringIO = StringIO

    async def ingest(
        self, data: str | bytes, *args, **kwargs
    ) -> AsyncGenerator[str, None]:
        """Ingest CSV data and yield the text of each row."""
        if isinstance(data, bytes):
            data = data.decode("utf-8")
        csv_reader = self.csv.reader(self.StringIO(data))
        for row in csv_reader:
            yield ", ".join(row)


class CSVParserAdvanced(AsyncParser[str | bytes]):
    """A parser for CSV data that sniffs the delimiter and yields multi-row chunks."""

    def __init__(
        self,
        config: IngestionConfig,
        database_provider: DatabaseProvider,
        llm_provider: CompletionProvider,
    ):
        self.database_provider = database_provider
        self.llm_provider = llm_provider
        self.config = config

        import csv
        from io import StringIO

        self.csv = csv
        self.StringIO = StringIO

    def get_delimiter(
        self,
        file_path: Optional[str] = None,
        file: Optional[IO] = None,
    ) -> str:
        """Sniff the delimiter (',' or ';') from a sample of the input."""
        sniffer = self.csv.Sniffer()
        num_bytes = 65536

        if file:
            lines = file.readlines(num_bytes)
            file.seek(0)
            # Lines may be bytes (binary file objects) or str (StringIO); decode only bytes.
            data = "\n".join(
                ln.decode("utf-8") if isinstance(ln, bytes) else ln for ln in lines
            )
        elif file_path is not None:
            with open(file_path) as f:
                data = "\n".join(f.readlines(num_bytes))
        else:
            raise ValueError("Either `file_path` or `file` must be provided.")

        return sniffer.sniff(data, delimiters=",;").delimiter

    async def ingest(
        self,
        data: str | bytes,
        num_col_times_num_rows: int = 100,
        *args,
        **kwargs,
    ) -> AsyncGenerator[str, None]:
        """Ingest CSV data and yield chunks of roughly `num_col_times_num_rows`
        cells, each prefixed with the header row."""
        if isinstance(data, bytes):
            data = data.decode("utf-8")

        delimiter = self.get_delimiter(file=self.StringIO(data))
        csv_reader = self.csv.reader(self.StringIO(data), delimiter=delimiter)

        # Treat the first row as the header and size chunks so that
        # rows-per-chunk * columns is roughly num_col_times_num_rows.
        header = next(csv_reader)
        num_cols = len(header)
        num_rows = max(1, num_col_times_num_rows // num_cols)

        chunk_rows = []
        for row_num, row in enumerate(csv_reader, start=1):
            chunk_rows.append(row)
            if row_num % num_rows == 0:
                yield (
                    ", ".join(header)
                    + "\n"
                    + "\n".join(", ".join(r) for r in chunk_rows)
                )
                chunk_rows = []

        if chunk_rows:
            yield (
                ", ".join(header)
                + "\n"
                + "\n".join(", ".join(r) for r in chunk_rows)
            )
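

if __name__ == "__main__":
    # Usage sketch: assumes neither parser touches its config/provider
    # arguments during plain ingestion, so `None` stand-ins are passed where
    # real IngestionConfig / provider instances would normally be injected.
    import asyncio

    async def _demo() -> None:
        sample = "name,age,city\nalice,30,berlin\nbob,25,paris\n"

        # Row-by-row parsing: one comma-joined line per CSV row.
        simple = CSVParser(config=None, database_provider=None, llm_provider=None)
        async for line in simple.ingest(sample):
            print(line)

        # Chunked parsing: each yielded chunk repeats the header line.
        advanced = CSVParserAdvanced(
            config=None, database_provider=None, llm_provider=None
        )
        async for chunk in advanced.ingest(sample, num_col_times_num_rows=6):
            print(chunk)

    asyncio.run(_demo())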