Now, with comments! 😉
$filename = "test.csv";
$fp = fopen($filename,"r") or die("could not open $filename");
$conn = pg_connect("dbname=dbname user=username") or die("could not connect to database");
# Note that by using a transaction, either all the rows get imported or none of
# them do. Should the import fail partway through, no data in the table will be
# changed, since this is a simple all-or-nothing transaction. While some
# databases like Oracle have limits on transaction size, PostgreSQL itself has
# no such limit, but there is a price to be paid in dead tuples that need to be
# cleaned up afterwards. See the vacuum at the bottom of the script.
pg_query("begin");
# Here we use the built-in fgetcsv function, which grabs a line from
# a csv file, parses it, and returns an array. It returns false at
# end of file, so testing its return value (rather than feof) stops
# the loop cleanly instead of trying to insert a bogus final row.
while (($row = fgetcsv($fp, 100000)) !== false){
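# The second argument (100000) is the maximum line length fgetcsv will
# read; it is just a guess about our data, and in recent PHP versions
# passing 0 removes the limit entirely.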
# Now we create an insert query to put the data
# we just got into the database. Our query should
# be of the form:
# insert into tablename (field1, field2) values ('value1','value2');
$query = "insert into test (d, f1, f2, f3) values ('";
# Here we use implode to put a ',' between each field.
$query.= implode("','",$row);
# right now, our query looks kinda like this:
# insert into tablename (field1, field2) values ('value1','value2
# So we just close it here to add the missing ') on the end.
$query.= "')";
pg_query($query);
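# If any insert fails, PostgreSQL aborts the whole transaction and the
# commit below is turned into a rollback, which is what gives us the
# all-or-nothing behavior described above.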
}
pg_query("commit");
# clean up the database after ourselves.
pg_query("vacuum full");