When I use psycopg2 with execute_values, I can write a batch of 1000 rows in 0.1–0.15 seconds:
import time
import logging

from psycopg2.extras import execute_values
from sqlalchemy import create_engine

logger = logging.getLogger(__name__)

self.engine = create_engine('postgresql+psycopg2://postgres:password@localhost/postgres',
                            pool_size=DB_POOL_SIZE, max_overflow=20)

def insert_data_todb(self, table_name, batch_data):
    try:
        t1 = time.perf_counter()
        insert_sql = f"""INSERT INTO {table_name} ({self._market_snapshot_columns_str}) VALUES %s;"""
        # execute_values expands the single %s into one multi-row VALUES list
        with self.engine.connect() as conn, conn.connection.cursor() as cur:
            execute_values(cur, insert_sql, batch_data)
        t2 = time.perf_counter()
        logger.info(f"Inserted {len(batch_data)} records in {t2 - t1} seconds")
    except Exception as ex:
        logger.error(f"Error inserting batch data into {table_name}:")
        logger.exception(ex)
I removed psycopg2, installed psycopg 3.2, and used psycopg3's executemany function like this:
import time
import logging

import psycopg
from sqlalchemy import create_engine

logger = logging.getLogger(__name__)

self.engine = create_engine('postgresql+psycopg://postgres:password@localhost/postgres',
                            pool_size=DB_POOL_SIZE, max_overflow=20)

def insert_data_todb(self, table_name, batch_data):
    try:
        t1 = time.perf_counter()
        placeholders = ', '.join(['%s'] * len(batch_data[0]))
        insert_sql = f"""INSERT INTO {table_name} ({self._market_snapshot_columns_str}) VALUES ({placeholders});"""  # stored variable
        with self.engine.connect() as conn:
            with conn.connection.cursor() as cur:  # raw DBAPI cursor, as in the psycopg2 version
                cur.executemany(insert_sql, batch_data)  # pass the batch data directly
        t2 = time.perf_counter()
        logger.info(f"Inserted {len(batch_data)} records in {t2 - t1} seconds")
    except Exception as ex:
        logger.error(f"Error inserting batch data into {table_name}:")
        logger.exception(ex)
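As a side note (not part of the original snippets): psycopg 3 also exposes the COPY protocol, which its documentation recommends as the fastest way to load many rows. Below is a minimal sketch assuming a plain psycopg connection and a hypothetical my_table with two columns; the connection string, table, and columns are placeholders, not taken from the code above.

import psycopg

# Assumed connection string and table; adjust to the real schema.
conninfo = "postgresql://postgres:password@localhost/postgres"
batch_data = [(1, "a"), (2, "b")]  # example rows

with psycopg.connect(conninfo) as conn:
    with conn.cursor() as cur:
        # COPY ... FROM STDIN streams all rows to the server in one pass
        with cur.copy("COPY my_table (id, name) FROM STDIN") as copy:
            for row in batch_data:
                copy.write_row(row)
    # the connection context manager commits on successful exit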
More details here: https://stackoverflow.com/questions/793 ... s-psycopg3