issues: 1436539554

id: 1436539554
node_id: I_kwDOCGYnMM5Vn9qi
number: 511
title: [insert_all, upsert_all] IntegrityError: constraint failed
user: 7908073
state: closed (state_reason: completed)
locked: 0
comments: 2
created_at: 2022-11-04T19:21:48Z
updated_at: 2022-11-04T22:59:54Z
closed_at: 2022-11-04T22:54:09Z
author_association: CONTRIBUTOR
repo: 140912432
type: issue

body:

My understanding is that `INSERT OR IGNORE` will ignore inserts that would cause duplicate keys, so I'm not sure exactly why the error is raised from `sqlite3`.

```
import argparse
from pathlib import Path

from xklb import db, utils
from xklb.utils import log


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser()
    parser.add_argument("database")
    parser.add_argument("dbs", nargs="*")
    parser.add_argument("--upsert")
    parser.add_argument("--db", "-db", help=argparse.SUPPRESS)
    parser.add_argument("--verbose", "-v", action="count", default=0)
    args = parser.parse_args()

    if args.db:
        args.database = args.db
    Path(args.database).touch()
    args.db = db.connect(args)
    log.info(utils.dict_filter_bool(args.__dict__))

    return args


def merge_db(args, source_db):
    # Copy every non-FTS, non-internal table from source_db into the target db
    source_db = str(Path(source_db).resolve())
    s_db = db.connect(argparse.Namespace(database=source_db, verbose=args.verbose))
    for table in [s for s in s_db.table_names() if "_fts" not in s and not s.startswith("sqlite_")]:
        log.info("[%s]: %s", source_db, table)
        with s_db.conn:
            data = s_db[table].rows
        with args.db.conn:
            if args.upsert:
                args.db[table].upsert_all(data, pk=args.upsert.split(","), alter=True)
            else:
                args.db[table].insert_all(data, alter=True, replace=True)


def merge_dbs():
    args = parse_args()
    for s_db in args.dbs:
        merge_db(args, s_db)


if __name__ == "__main__":
    merge_dbs()
```

```
$ lb-dev merge video.db tube_71.db --upsert path -vv
SQL: INSERT OR IGNORE INTO [media]([path]) VALUES(?); - params: ['https://archive.org/details/088ghostofachanceroygetssackedrevengeofthelivinglunchdvdripxvidphz']
...
File ~/.local/lib/python3.10/site-packages/sqlite_utils/db.py:3122, in Table.insert_all(self, records, pk, foreign_keys, column_order, not_null, defaults, batch_size, hash_id, hash_id_columns, alter, ignore, replace, truncate, extracts, conversions, columns, upsert, analyze)
   3116 all_columns += [
   3117     column for column in record if column not in all_columns
   3118 ]
   3120 first = False
-> 3122 self.insert_chunk(
   3123     alter,
   3124     extracts,
   3125     chunk,
   3126     all_columns,
   3127     hash_id,
   3128     hash_id_columns,
   3129     upsert,
   3130     pk,
   3131     conversions,
   3132     num_records_processed,
   3133     replace,
   3134     ignore,
   3135 )
   3137 if analyze:
   3138     self.analyze()

File ~/.local/lib/python3.10/site-packages/sqlite_utils/db.py:2887, in Table.insert_chunk(self, alter, extracts, chunk, all_columns, hash_id, hash_id_columns, upsert, pk, conversions, num_records_processed, replace, ignore)
   2885 for query, params in queries_and_params:
   2886     try:
-> 2887         result = self.db.execute(query, params)
   2888     except OperationalError as e:
   2889         if alter and (" column" in e.args[0]):
   2890             # Attempt to add any missing columns, then try again

File ~/.local/lib/python3.10/site-packages/sqlite_utils/db.py:484, in Database.execute(self, sql, parameters)
   482     self._tracer(sql, parameters)
   483 if parameters is not None:
--> 484     return self.conn.execute(sql, parameters)
   485 else:
   486     return self.conn.execute(sql)

IntegrityError: constraint failed

> /home/xk/.local/lib/python3.10/site-packages/sqlite_utils/db.py(484)execute()
    482     self._tracer(sql, parameters)
    483 if parameters is not None:
--> 484     return self.conn.execute(sql, parameters)
    485 else:
    486     return self.conn.execute(sql)
```

```
sqlite3 --version
3.36.0 2021-06-18 18:36:39
```
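For context, the two sqlite-utils calls in the script handle key collisions differently: `insert_all(..., replace=True)` issues `INSERT OR REPLACE`, overwriting any existing row wholesale, while `upsert_all(..., pk=...)` inserts the key if it is missing and then updates the remaining columns. A small sketch against a throwaway in-memory database (hypothetical rows, not the xklb schema):

```
import sqlite_utils

db = sqlite_utils.Database(memory=True)
db["media"].insert_all([{"path": "a", "size": 1}], pk="path")

# replace=True -> INSERT OR REPLACE: the existing pk's row is overwritten wholesale
db["media"].insert_all([{"path": "a", "size": 2}], pk="path", replace=True)

# upsert_all -> insert the pk if missing, then update the remaining columns;
# alter=True adds any columns the table does not have yet
db["media"].upsert_all([{"path": "a", "duration": 9}], pk="path", alter=True)

print(list(db["media"].rows))  # [{'path': 'a', 'size': 2, 'duration': 9}]
```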
"https://api.github.com/repos/simonw/sqlite-utils/issues/511/reactions", "total_count": 0, "+1": 0, "-1": 0, "laugh": 0, "hooray": 0, "confused": 0, "heart": 0, "rocket": 0, "eyes": 0}   completed
