Can't compose elements in a Python PostgreSQL statement

Question:

I am trying to generate some SQL for a Python update script. I want to make it read in a JSON file and then extract the column names.

 def insert_payout(payout, items, node_name):#TODO add data source in pycharm

     vals = []
     nftDict = json.loads(nft)
     nftList = list(nftDict.keys())
     table = 'consortia.public.' + node_name
col_list = []

     for i in range(len(nftList)):
         col_list.append(str({nftList[i]}))
         vals.append(payout)

     base_sql = sql.SQL(
         '''INSERT INTO {} ({}) 
         VALUES ({})
         '''). 
         format(sql.Identifier(table),
                sql.SQL(", ").join(map(sql.Identifier, col_list)),
                sql.SQL(', ').join(sql.Placeholder() * len(col_list))
           )
     

      conn = None

      try:
          # read database configuration
         params = setparams()
         # connect to the PostgreSQL database
          conn = psycopg2.connect(**params)
         # create a new cursor
          cur = conn.cursor()
         # execute the INSERT statement
         #cur.execute(base_sql, vals)
         # commit the changes to the database
         conn.commit()
         # close communication with the database
          cur.close()
      except (Exception, psycopg2.DatabaseError) as error:
         print(error)
     finally:
         if conn is not None:
             conn.close()


  if __name__ == '__main__':
          
         insert_payout(7.4, item_json, 'test_write_table')

Error is

 TypeError: Composed elements must be Composable, got 'N' instead

New Error is:

  "ItemUniqueName31": [
    "Item_ID_Number31",
    "<something31>",
    "BooleanActive"
],
"ItemUniqueName32": [
    "Item_ID_Number32",
    "<something32>",
    "BooleanActive"
]
 }


 dict is not a sequence

Newest error

 relation "consortia.public.test_write_table" does not exist

LINE 1: INSERT INTO "consortia.public.test_write_table" ("{‘ItemUniqu…
^

I am also not adding the operator correctly; I used this answer as a guide. Once I get this working, I am going to work on the vals.

Asked By: Kyle Sponable

||

Answers:

Simpler solution:

Updated to deal with schema qualifying table name.

# Make table name a tuple for later unpacking in sql.Identifier to
# build schema qualified name.
table = ("public", "test_table")
colList = ['test', 'test2', 'test3']
base_sql = sql.SQL(
         '''INSERT INTO {} ({}) 
         VALUES ({})
         '''). 
         format(sql.Identifier(*table), # * unpack tuple to get schema qualification.
                sql.SQL(", ").join(map(sql.Identifier, colList)),
                sql.SQL(', ').join(sql.Placeholder() * len(colList))
                )

print(base_sql.as_string(con))                                                                                                                                        
INSERT INTO "public"."test_table" ("test", "test2", "test3") 
         VALUES (%s, %s, %s)

UPDATE

To illustrate what I mean by passing the values in correctly, as discussed in comments below:

\d test_table 
                  Table "public.test_table"
 Column |       Type        | Collation | Nullable | Default 
--------+-------------------+-----------+----------+---------
 test   | integer           |           |          | 
 test2  | character varying |           |          | 
 test3  | boolean           |           |          | 

vals = [1, 'dog', False]

cur.execute(base_sql, vals)    

con.commit()

select * from test_table ;
 test | test2 | test3 
------+-------+-------
    1 | dog   | f



Answered By: Adrian Klaver
Categories: questions Tags: , ,
Answers are sorted by their score. The answer accepted by the question owner as the best is marked with a checkmark at the top-right corner.