csv_parser.py

# type: ignore
from typing import IO, AsyncGenerator, Optional

from core.base.parsers.base_parser import AsyncParser
from core.base.providers import (
    CompletionProvider,
    DatabaseProvider,
    IngestionConfig,
)

class CSVParser(AsyncParser[str | bytes]):
    """A parser for CSV data."""

    def __init__(
        self,
        config: IngestionConfig,
        database_provider: DatabaseProvider,
        llm_provider: CompletionProvider,
    ):
        self.database_provider = database_provider
        self.llm_provider = llm_provider
        self.config = config

        import csv
        from io import StringIO

        self.csv = csv
        self.StringIO = StringIO

    async def ingest(
        self, data: str | bytes, *args, **kwargs
    ) -> AsyncGenerator[str, None]:
        """Ingest CSV data and yield text from each row."""
        if isinstance(data, bytes):
            data = data.decode("utf-8")
        csv_reader = self.csv.reader(self.StringIO(data))
        for row in csv_reader:
            yield ", ".join(row)
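
# --- Usage sketch (illustrative, not part of the upstream module) -----------
# CSVParser stores its providers but never touches them inside ingest(), so
# None placeholders are enough for a quick local check, e.g.:
#
#     parser = CSVParser(config=None, database_provider=None, llm_provider=None)
#     async for line in parser.ingest("a,b\n1,2\n"):
#         print(line)  # -> "a, b" then "1, 2"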

class CSVParserAdvanced(AsyncParser[str | bytes]):
    """A parser for CSV data that yields rows in chunks, each prefixed with the header."""

    def __init__(
        self,
        config: IngestionConfig,
        database_provider: DatabaseProvider,
        llm_provider: CompletionProvider,
    ):
        self.database_provider = database_provider
        self.llm_provider = llm_provider
        self.config = config

        import csv
        from io import StringIO

        self.csv = csv
        self.StringIO = StringIO

    def get_delimiter(
        self, file_path: Optional[str] = None, file: Optional[IO] = None
    ):
        sniffer = self.csv.Sniffer()
        num_bytes = 65536
        if file:
            lines = file.readlines(num_bytes)
            file.seek(0)
            # The caller may pass a text stream (e.g. StringIO), so only
            # decode lines that are actually bytes.
            data = "\n".join(
                ln.decode("utf-8") if isinstance(ln, bytes) else ln
                for ln in lines
            )
        elif file_path is not None:
            with open(file_path) as f:
                data = "\n".join(f.readlines(num_bytes))
        else:
            raise ValueError("Either file_path or file must be provided")
        return sniffer.sniff(data, delimiters=",;").delimiter

    async def ingest(
        self,
        data: str | bytes,
        num_col_times_num_rows: int = 100,
        *args,
        **kwargs,
    ) -> AsyncGenerator[str, None]:
        """Ingest CSV data and yield chunks of rows as text, with the header
        row prepended to every chunk."""
        if isinstance(data, bytes):
            try:
                data = data.decode("utf-8")
            except UnicodeDecodeError:
                # Try other common single-byte encodings (latin-1 accepts any
                # byte sequence, so this fallback effectively always succeeds).
                for encoding in ["latin-1", "cp1252", "iso-8859-1"]:
                    try:
                        data = data.decode(encoding)
                        break
                    except UnicodeDecodeError:
                        continue
                else:
                    raise ValueError(
                        "Unable to decode the provided byte data with any supported encoding"
                    )

        # Treat the first row as the header.
        delimiter = self.get_delimiter(file=self.StringIO(data))
        csv_reader = self.csv.reader(self.StringIO(data), delimiter=delimiter)

        header = next(csv_reader)
        # `header` is already a list of column names, so its length is the column count.
        num_cols = len(header)
        # Guard against very wide files where the cell budget would round down to zero rows.
        num_rows = max(1, num_col_times_num_rows // num_cols)

        chunk_rows = []
        for row_num, row in enumerate(csv_reader):
            chunk_rows.append(row)
            # Emit a chunk once `num_rows` rows have accumulated.
            if (row_num + 1) % num_rows == 0:
                yield (
                    ", ".join(header)
                    + "\n"
                    + "\n".join([", ".join(row) for row in chunk_rows])
                )
                chunk_rows = []

        if chunk_rows:
            yield (
                ", ".join(header)
                + "\n"
                + "\n".join([", ".join(row) for row in chunk_rows])
            )
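
# --- Usage sketch (illustrative, not part of the upstream module) -----------
# A minimal way to drive CSVParserAdvanced directly. As above, the provider
# arguments are placeholders because ingest() never dereferences them; in the
# real ingestion pipeline concrete IngestionConfig, DatabaseProvider and
# CompletionProvider instances would be injected.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        sample = "name;age\nalice;30\nbob;25\n"
        parser = CSVParserAdvanced(
            config=None, database_provider=None, llm_provider=None
        )
        # With a budget of 4 cells and 2 columns, each chunk holds 2 rows.
        async for chunk in parser.ingest(sample, num_col_times_num_rows=4):
            print(chunk)

    asyncio.run(_demo())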